
Commit e200409

shirzady1934 authored
removed export_cpp_header in the whole of code (#537)
Co-authored-by: shirzady1934 <shirzady1934@gmail.com>
1 parent b47f53f commit e200409

7 files changed: +0 −151 lines changed

nam/models/conv_net.py

Lines changed: 0 additions & 32 deletions
@@ -173,38 +173,6 @@ def _num_layers(self) -> int:
     def _batchnorm(self) -> bool:
         return _BATCHNORM_NAME in self._net._modules["block_0"]._modules

-    def export_cpp_header(self, filename: _Path):
-        with _TemporaryDirectory() as tmpdir:
-            tmpdir = _Path(tmpdir)
-            self.export(_Path(tmpdir))
-            with open(_Path(tmpdir, "config.json"), "r") as fp:
-                _c = _json.load(fp)
-            version = _c["version"]
-            config = _c["config"]
-            with open(filename, "w") as f:
-                f.writelines(
-                    (
-                        "#pragma once\n",
-                        "// Automatically-generated model file\n",
-                        "#include <vector>\n",
-                        f'#define PYTHON_MODEL_VERSION "{version}"\n',
-                        f"const int CHANNELS = {config['channels']};\n",
-                        f"const bool BATCHNORM = {'true' if config['batchnorm'] else 'false'};\n",
-                        "std::vector<int> DILATIONS{"
-                        + ",".join([str(d) for d in config["dilations"]])
-                        + "};\n",
-                        f"const std::string ACTIVATION = \"{config['activation']}\";\n",
-                        "std::vector<float> PARAMS{"
-                        + ",".join(
-                            [
-                                f"{w:.16f}"
-                                for w in _np.load(_Path(tmpdir, "weights.npy"))
-                            ]
-                        )
-                        + "};\n",
-                    )
-                )
-
     def _export_config(self):
         return {
             "channels": self._channels,

nam/models/exportable.py

Lines changed: 0 additions & 8 deletions
@@ -102,14 +102,6 @@ def export(
         # And resume training state
         self.train(training)

-    @_abc.abstractmethod
-    def export_cpp_header(self, filename: _Path):
-        """
-        Export a .h file to compile into the plugin with the weights written right out
-        as text
-        """
-        pass
-
     def export_onnx(self, filename: _Path):
         """
         Export model in format for ONNX Runtime
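With the abstract method gone, model subclasses no longer need to stub it out. A minimal sketch mirroring the updated test doubles later in this diff; the import path, base-class name, and remaining hook set are assumptions inferred from this diff, not confirmed API.

```python
# Minimal sketch of a subclass after this change; import path, base-class
# name, and the remaining hooks are assumptions based on this diff.
from nam.models.exportable import Exportable


class MinimalModel(Exportable):
    # No export_cpp_header stub is required anymore.
    def _export_config(self):
        return {}

    def _export_weights(self):
        return []
```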

nam/models/linear.py

Lines changed: 0 additions & 3 deletions
@@ -27,9 +27,6 @@ def pad_start_default(self) -> bool:
     def receptive_field(self) -> int:
         return self._net.weight.shape[2]

-    def export_cpp_header(self):
-        raise NotImplementedError()
-
     @property
     def _bias(self) -> bool:
         return self._net.bias is not None

nam/models/recurrent.py

Lines changed: 0 additions & 37 deletions
@@ -129,36 +129,6 @@ def pad_start_default(self) -> bool:
         # I should simplify this...
         return True

-    def export_cpp_header(self, filename: _Path):
-        with _TemporaryDirectory() as tmpdir:
-            tmpdir = _Path(tmpdir)
-            LSTM.export(self, _Path(tmpdir))  # Hacky...need to work w/ CatLSTM
-            with open(_Path(tmpdir, "model.nam"), "r") as fp:
-                _c = _json.load(fp)
-            version = _c["version"]
-            config = _c["config"]
-            s_parametric = self._export_cpp_header_parametric(config.get("parametric"))
-            with open(filename, "w") as f:
-                f.writelines(
-                    (
-                        "#pragma once\n",
-                        "// Automatically-generated model file\n",
-                        "#include <vector>\n",
-                        '#include "json.hpp"\n',
-                        '#include "lstm.h"\n',
-                        f'#define PYTHON_MODEL_VERSION "{version}"\n',
-                        f'const int NUM_LAYERS = {config["num_layers"]};\n',
-                        f'const int INPUT_SIZE = {config["input_size"]};\n',
-                        f'const int HIDDEN_SIZE = {config["hidden_size"]};\n',
-                    )
-                    + s_parametric
-                    + (
-                        "std::vector<float> PARAMS{"
-                        + ", ".join([f"{w:.16f}f" for w in _c["weights"]])
-                        + "};\n",
-                    )
-                )
-
     def _apply_head(self, features: _torch.Tensor) -> _torch.Tensor:
         """
         :param features: (B,S,DH)
@@ -242,13 +212,6 @@ def _export_config(self):
             "hidden_size": self._core.hidden_size,
             "num_layers": self._core.num_layers,
         }
-
-    def _export_cpp_header_parametric(self, config):
-        # TODO refactor to merge w/ WaveNet implementation
-        if config is not None:
-            raise ValueError("Got non-None parametric config")
-        return ("nlohmann::json PARAMETRIC {};\n",)
-
     def _export_weights(self):
         """
         * Loop over cells:
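Everything the removed LSTM header writer consumed is already present in the standard `.nam` export, which is a JSON document. A short sketch of reading those fields directly, with key names taken from the removed code above (the file path is an assumption):

```python
# Sketch: read the fields the removed LSTM header writer used, straight from
# a standard .nam export. The path "exported/model.nam" is an assumption.
import json

with open("exported/model.nam", "r") as fp:
    c = json.load(fp)

version = c["version"]
num_layers = c["config"]["num_layers"]
input_size = c["config"]["input_size"]
hidden_size = c["config"]["hidden_size"]
weights = c["weights"]  # flat list of floats; was formatted as f"{w:.16f}f" above
```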

nam/models/wavenet.py

Lines changed: 0 additions & 66 deletions
@@ -356,67 +356,6 @@ def pad_start_default(self) -> bool:
     def receptive_field(self) -> int:
         return self._net.receptive_field

-    def export_cpp_header(self, filename: _Path):
-        with _TemporaryDirectory() as tmpdir:
-            tmpdir = _Path(tmpdir)
-            WaveNet.export(self, _Path(tmpdir))  # Hacky...need to work w/ CatWaveNet
-            with open(_Path(tmpdir, "model.nam"), "r") as fp:
-                _c = _json.load(fp)
-            version = _c["version"]
-            config = _c["config"]
-
-            if config["head"] is not None:
-                raise NotImplementedError("No heads yet")
-            # head_scale
-            # with_head
-            # parametric
-
-            # String for layer array params:
-            s_lap = (
-                "const std::vector<wavenet::LayerArrayParams> LAYER_ARRAY_PARAMS{\n",
-            )
-            for i, lc in enumerate(config["layers"], 1):
-                s_lap_line = (
-                    f'    wavenet::LayerArrayParams({lc["input_size"]}, '
-                    f'{lc["condition_size"]}, {lc["head_size"]}, {lc["channels"]}, '
-                    f'{lc["kernel_size"]}, std::vector<int> '
-                    "{"
-                    + ", ".join([str(d) for d in lc["dilations"]])
-                    + "}, "
-                    + (
-                        f'"{lc["activation"]}", {str(lc["gated"]).lower()}, '
-                        f'{str(lc["head_bias"]).lower()})'
-                    )
-                )
-                if i < len(config["layers"]):
-                    s_lap_line += ","
-                s_lap_line += "\n"
-                s_lap += (s_lap_line,)
-            s_lap += ("};\n",)
-            s_parametric = self._export_cpp_header_parametric(config.get("parametric"))
-            with open(filename, "w") as f:
-                f.writelines(
-                    (
-                        "#pragma once\n",
-                        "// Automatically-generated model file\n",
-                        "#include <vector>\n",
-                        '#include "json.hpp"\n',
-                        '#include "wavenet.h"\n',
-                        f'#define PYTHON_MODEL_VERSION "{version}"\n',
-                    )
-                    + s_lap
-                    + (
-                        f'const float HEAD_SCALE = {config["head_scale"]};\n',
-                        "const bool WITH_HEAD = false;\n",
-                    )
-                    + s_parametric
-                    + (
-                        "std::vector<float> PARAMS{"
-                        + ", ".join([f"{w:.16f}f" for w in _c["weights"]])
-                        + "};\n",
-                    )
-                )
-
     def import_weights(self, weights: _Sequence[float]):
         if not isinstance(weights, _torch.Tensor):
             weights = _torch.Tensor(weights)
@@ -425,11 +364,6 @@ def import_weights(self, weights: _Sequence[float]):
     def _export_config(self):
         return self._net.export_config()

-    def _export_cpp_header_parametric(self, config):
-        if config is not None:
-            raise ValueError("Got non-None parametric config")
-        return ("nlohmann::json PARAMETRIC {};\n",)
-
     def _export_weights(self) -> _np.ndarray:
         return self._net.export_weights()
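The fiddliest part of the removed WaveNet writer is serializing the layer arrays with correct trailing commas (every entry but the last gets one). A standalone sketch of that loop over `config["layers"]` from a loaded `.nam` export, adapted from the removed code; the helper name is an assumption:

```python
# Standalone sketch of the layer-array serialization from the removed WaveNet
# method. "layers" is config["layers"] from a loaded .nam export; the function
# name is an assumption.
def layer_array_params_lines(layers):
    lines = ["const std::vector<wavenet::LayerArrayParams> LAYER_ARRAY_PARAMS{\n"]
    for i, lc in enumerate(layers, 1):
        line = (
            f'    wavenet::LayerArrayParams({lc["input_size"]}, '
            f'{lc["condition_size"]}, {lc["head_size"]}, {lc["channels"]}, '
            f'{lc["kernel_size"]}, std::vector<int> {{'
            + ", ".join(str(d) for d in lc["dilations"])
            + f'}}, "{lc["activation"]}", {str(lc["gated"]).lower()}, '
            f'{str(lc["head_bias"]).lower()})'
        )
        if i < len(layers):
            line += ","  # every entry except the last gets a trailing comma
        lines.append(line + "\n")
    lines.append("};\n")
    return lines
```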

tests/test_nam/test_models/test_base.py

Lines changed: 0 additions & 2 deletions
@@ -31,8 +31,6 @@ def pad_start_default(self) -> bool:
     def receptive_field(self) -> int:
         return 1

-    def export_cpp_header(self, filename: Path):
-        pass

     def _export_config(self):
         pass

tests/test_nam/test_models/test_exportable.py

Lines changed: 0 additions & 3 deletions
@@ -200,9 +200,6 @@ def __init__(self):
     def forward(self, x: torch.Tensor):
         return self._scale * x + self._bias

-    def export_cpp_header(self, filename: Path):
-        pass
-
     def _export_config(self):
         return {}
