File size: 14,244 Bytes
c7743b0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
import os.path
from dataclasses import dataclass
from typing import Tuple, List, Union

import matplotlib
import pandas as pd
from PIL import Image
from matplotlib import axes
from pandas import DataFrame
from pandas.plotting._matplotlib.style import get_standard_colors
from tensorboard.compat.proto import event_pb2

from dreambooth.shared import status


@dataclass
class YAxis:
    """A named y-axis together with the dataframe columns drawn against it."""
    name: str  # axis label shown on the plot
    columns: List[str]  # dataframe column names plotted on this axis

@dataclass
class PlotDefinition:
    """Describes one chart: its title, x column, and one or more y-axis groups."""
    title: str  # chart title
    x_axis: str  # dataframe column used as the shared x axis
    y_axis: List[YAxis]  # one YAxis per (possibly twinned) y axis


@dataclass()
class ParsedValues:
    """Dataframes parsed from a single TensorBoard event file."""
    loss: DataFrame  # "loss" scalar rows; may carry merged LR/instance/prior columns
    lr: DataFrame  # "lr" scalar rows
    ram: DataFrame  # "vram_usage"/"vram" scalar rows
    merged: bool  # True if every loss step had matching lr/instance/prior values


class LogParser:
    """Parse TensorBoard event files written during Dreambooth training and
    render smoothed loss / learning-rate / VRAM plots as image files.

    Parse results are cached per event file (keyed by path and mtime), so
    repeated calls only re-read files that changed on disk. Call
    :meth:`reset` when switching to a different model.
    """

    def __init__(self):
        self.logging_dir = None  # logging directory of the current model
        self.model_name = None  # model whose logs are currently cached
        self.parsed = {}  # event-file path -> ParsedValues
        self.out_loss = []
        self.out_lr = []
        self.out_ram = []
        self.parsed_files = {}  # event-file path -> mtime at last parse
        self.smoothing_window = 50  # rolling-mean window used when plotting

    # Call this when switching models
    def reset(self):
        """Drop every cached parse result."""
        self.parsed = {}
        self.out_loss = []
        self.out_lr = []
        self.out_ram = []
        self.parsed_files = {}

    def plot_multi_alt(
            self,
            data: pd.DataFrame,
            plot_definition: "PlotDefinition",
            spacing: float = 0.1,
    ):
        """Plot one or more y-axis groups against a shared x axis.

        The first ``YAxis`` group is drawn on the primary axis; each further
        group gets its own twinned y axis, offset to the right by ``spacing``.

        Args:
            data: dataframe holding the x column and all y columns.
            plot_definition: chart title, x column and y-axis groups.
            spacing: horizontal offset between stacked right-hand spines.

        Returns:
            The primary matplotlib Axes.

        Raises:
            ValueError: if no y-axis group is given, a group has more traces
                than supported, or there are more groups than palette colors.
        """
        styles = ["-", ":", "--", "-."]
        colors = get_standard_colors(num_colors=7)
        loss_color = colors[0]
        avg_colors = colors[1:]
        if not plot_definition.y_axis:
            # Previously an empty definition fell through to an unbound
            # ``ax`` and raised NameError at ``ax.legend``.
            raise ValueError("PlotDefinition must declare at least one y-axis.")
        ax = None
        for i, yi in enumerate(plot_definition.y_axis):
            if len(yi.columns) > len(styles):
                raise ValueError(
                    f"Maximum {len(styles)} traces per yaxis allowed. If we want to allow this we need to add some logic.")
            # FIX: was ``i > len(colors)``, an off-by-one that allowed one
            # axis beyond the stated maximum before the guard fired.
            if i >= len(colors):
                raise ValueError(
                    f"Maximum {len(colors)} yaxis axis allowed. If we want to allow this we need to add some logic.")

            if i == 0:
                ax = data.plot(
                    x=plot_definition.x_axis,
                    y=yi.columns,
                    title=plot_definition.title,
                    color=[loss_color] * len(yi.columns)
                )
                ax.set_ylabel(ylabel=yi.name)

            else:
                # Multiple y-axes: twin the primary axis and push each new
                # right-hand spine further out.
                ax_new = ax.twinx()
                ax_new.spines["right"].set_position(("axes", 1 + spacing * (i - 1)))
                data.plot(
                    ax=ax_new,
                    x=plot_definition.x_axis,
                    y=yi.columns,
                    color=[avg_colors[yl] for yl in range(len(yi.columns))]
                )
                ax_new.set_ylabel(ylabel=yi.name)

        ax.legend(loc=0)

        return ax

    def plot_multi(
            self,
            data: pd.DataFrame,
            x: Union[str, None] = None,
            y: Union[List[str], None] = None,
            spacing: float = 0.1,
            **kwargs
    ) -> "matplotlib.axes.Axes":
        """Plot multiple Y axes on the same chart with same x axis.

        Args:
            data: dataframe which contains x and y columns
            x: column to use as x axis. If None, use index.
            y: list of columns to use as Y axes. If None, all columns are used
                except x column.
            spacing: spacing between the plots
            **kwargs: keyword arguments to pass to data.plot()

        Returns:
            a matplotlib.axes.Axes object returned from data.plot(), or
            None when there is nothing to plot (no y columns remain).

        See Also:
            This code is mentioned in https://stackoverflow.com/q/11640243/2593810
        """

        # Get default color style from pandas - can be changed to any other color list
        if y is None:
            y = data.columns

        # remove x_col from y_cols
        if x:
            y = [col for col in y if col != x]

        if len(y) == 0:
            return
        colors = get_standard_colors(num_colors=len(y))

        if "legend" not in kwargs:
            kwargs["legend"] = False  # prevent multiple legends

        # First axis
        ax = data.plot(x=x, y=y[0], color=colors[0], **kwargs)
        ax.set_ylabel(ylabel=y[0])
        lines, labels = ax.get_legend_handles_labels()

        for i in range(1, len(y)):
            # Multiple y-axes
            ax_new = ax.twinx()
            ax_new.spines["right"].set_position(("axes", 1 + spacing * (i - 1)))
            data.plot(
                ax=ax_new, x=x, y=y[i], color=colors[i % len(colors)], **kwargs
            )
            ax_new.set_ylabel(ylabel=y[i])

            # Collect handles from every twinned axis for one shared legend
            line, label = ax_new.get_legend_handles_labels()
            lines += line
            labels += label

        ax.legend(lines, labels, loc=0)

        return ax

    def parse_logs(self, model_name: str, for_ui: bool = False):
        """Convert local TensorBoard data into plots.

        Recursively parses all event files under the model's logging
        directory, merges loss/LR/VRAM scalars into DataFrames, and saves
        smoothed plot images next to the logs.

        *Note* that the whole data is converted into DataFrames. Depending
        on the data size this might take a while. If it takes too long then
        narrow it to some sub-directories.

        Parameters:
            model_name: (str) path to db model config/dir.
            for_ui: (bool) Generate UI-formatted text outputs.

        Returns:
            A ``(images, names)`` tuple — plot images and their titles
            (``names`` is a ``<br>``-joined string when ``for_ui``) — or
            None if the model config could not be loaded.
        """
        matplotlib.use("Agg")
        if for_ui:
            print("Generating graphs?")
            status.textinfo = "Generating graphs"

        def convert_tfevent(filepath) -> Tuple[DataFrame, DataFrame, DataFrame, bool]:
            # Read one event file into (loss, lr, ram) frames. The fourth
            # value is True when every loss step could be merged with
            # matching lr/instance-loss/prior-loss values.
            loss_events = []
            lr_events = []
            ram_events = []
            instance_loss_events = []
            prior_loss_events = []
            has_all = False
            try:
                import tensorflow
            except ImportError:
                print("Unable to import tensorflow")
                return pd.DataFrame(loss_events), pd.DataFrame(lr_events), pd.DataFrame(ram_events), has_all

            serialized_examples = tensorflow.data.TFRecordDataset(filepath)

            for serialized_example in serialized_examples:
                e = event_pb2.Event.FromString(serialized_example.numpy())
                if len(e.summary.value):
                    parsed = parse_tfevent(e)
                    if parsed["Name"] == "lr":
                        lr_events.append(parsed)
                    elif parsed["Name"] == "loss":
                        loss_events.append(parsed)
                    elif parsed["Name"] == "vram_usage" or parsed["Name"] == "vram":
                        ram_events.append(parsed)
                    elif parsed["Name"] == "instance_loss" or parsed["Name"] == "inst_loss":
                        instance_loss_events.append(parsed)
                    elif parsed["Name"] == "prior_loss":
                        prior_loss_events.append(parsed)

            merged_events = []

            # Merge per-step lr/instance/prior values onto the loss rows;
            # if any step lacks one of them, keep the unmerged loss rows.
            has_all = True
            for le in loss_events:
                lr = next((item for item in lr_events if item["Step"] == le["Step"]), None)
                instance_loss = next((item for item in instance_loss_events if item["Step"] == le["Step"]), None)
                prior_loss = next((item for item in prior_loss_events if item["Step"] == le["Step"]), None)
                if lr is not None and instance_loss is not None and prior_loss is not None:
                    le["LR"] = lr["Value"]
                    le["Loss"] = le["Value"]
                    le["Instance_Loss"] = instance_loss["Value"]
                    le["Prior_Loss"] = prior_loss["Value"]
                    merged_events.append(le)
                else:
                    has_all = False
            if has_all:
                loss_events = merged_events

            return pd.DataFrame(loss_events), pd.DataFrame(lr_events), pd.DataFrame(ram_events), has_all

        def parse_tfevent(tfevent):
            # Flatten the first scalar summary of an event into a row dict.
            return {
                "Wall_time": tfevent.wall_time,
                "Name": tfevent.summary.value[0].tag,
                "Step": tfevent.step,
                "Value": float(tfevent.summary.value[0].simple_value),
            }

        try:
            from dreambooth.dataclasses.db_config import from_file  # noqa
        except ImportError:
            from core.modules.dreambooth.dreambooth.dataclasses.db_config import from_file # noqa
        model_config = from_file(model_name)
        print(f"Model name: {model_name}")
        if model_config is None:
            print("Unable to load model config!")
            return None
        self.smoothing_window = int(model_config.graph_smoothing)
        if self.model_name != model_name:
            if for_ui:
                # FIX: log the incoming name; ``self.model_name`` still holds
                # the previous model at this point.
                print(f"Setting model name: {model_name}")

            self.reset()
            self.model_name = model_name

        self.logging_dir = os.path.join(model_config.model_dir, "logging", "dreambooth")

        columns_order = ['Wall_time', 'Name', 'Step', 'Value']
        if for_ui:
            print(f"Walking: {self.logging_dir}")

        # Re-parse only event files that are new or whose mtime changed.
        for (root, _, filenames) in os.walk(self.logging_dir):
            for filename in filenames:
                if "events.out.tfevents" not in filename and "dreambooth.events" not in filename:
                    continue
                file_full_path = os.path.join(root, filename)
                f_time = os.path.getmtime(file_full_path)
                do_parse = True
                if file_full_path in self.parsed_files.keys():
                    e_time = self.parsed_files[file_full_path]
                    if e_time != f_time:
                        print(f"Log file updated, re-parsing: {file_full_path}")
                    else:
                        print(f"Log file unchanged, nothing to do: {file_full_path}")
                        do_parse = False
                if do_parse:
                    self.parsed_files[file_full_path] = f_time
                    converted_loss, converted_lr, converted_ram, merged = convert_tfevent(file_full_path)
                    self.parsed[file_full_path] = ParsedValues(converted_loss, converted_lr, converted_ram, merged)

        out_loss = []
        out_lr = []
        out_ram = []
        has_all_lr = True

        for file, data in self.parsed.items():
            out_loss.append(data.loss)
            out_lr.append(data.lr)
            out_ram.append(data.ram)
            if not data.merged:
                has_all_lr = False

        loss_columns = columns_order
        if has_all_lr:
            loss_columns = ['Wall_time', 'Name', 'Step', 'Loss', "LR", "Instance_Loss", "Prior_Loss"]
        # Concatenate (and sort) all partial individual dataframes
        all_df_loss = pd.concat(out_loss)[loss_columns]
        # FIX: fillna(method="ffill") is deprecated (removed in pandas 2.x).
        all_df_loss = all_df_loss.ffill()
        all_df_loss = all_df_loss.sort_values("Wall_time")
        all_df_loss = all_df_loss.reset_index(drop=True)
        # Cap the smoothing window at a third of the data and keep it >= 1:
        # rolling(0) raises on short logs.
        sw = int(self.smoothing_window if self.smoothing_window < len(all_df_loss) / 3 else len(all_df_loss) / 3)
        sw = max(sw, 1)
        all_df_loss = all_df_loss.rolling(sw).mean(numeric_only=True)

        out_images = []
        out_names = []
        status.job_count = 2
        status.job_no = 1
        status.textinfo = "Plotting data..."
        if has_all_lr:
            plotted_loss = self.plot_multi_alt(
                all_df_loss,
                plot_definition=PlotDefinition(
                    title=f"Loss Average/Learning Rate ({model_config.lr_scheduler})",
                    x_axis="Step",
                    y_axis=[
                        YAxis(name="LR", columns=["LR"]),
                        YAxis(name="Loss", columns=["Instance_Loss", "Prior_Loss", "Loss"]),

                    ]
                )
            )
            loss_name = "Loss Average/Learning Rate"
        else:
            # LR could not be merged per-step; plot loss and LR separately.
            plotted_loss = all_df_loss.plot(x="Step", y="Value", title="Loss Averages")
            loss_name = "Loss Averages"
            all_df_lr = pd.concat(out_lr)[columns_order]
            all_df_lr = all_df_lr.sort_values("Wall_time")
            all_df_lr = all_df_lr.reset_index(drop=True)
            all_df_lr = all_df_lr.rolling(self.smoothing_window).mean(numeric_only=True)
            plotted_lr = all_df_lr.plot(x="Step", y="Value", title="Learning Rate")
            lr_img = os.path.join(model_config.model_dir, "logging", f"lr_plot_{model_config.revision}.png")
            plotted_lr.figure.savefig(lr_img)
            matplotlib.pyplot.close(plotted_lr.figure)
            log_lr = Image.open(lr_img)
            out_images.append(log_lr)
            out_names.append("Learning Rate")

        status.job_no = 2
        status.textinfo = "Saving graph data..."
        loss_img = os.path.join(model_config.model_dir, "logging", f"loss_plot_{model_config.revision}.png")
        print(f"Saving {loss_img}")
        plotted_loss.figure.savefig(loss_img)
        matplotlib.pyplot.close(plotted_loss.figure)
        log_pil = Image.open(loss_img)
        out_images.append(log_pil)
        out_names.append(loss_name)
        try:
            all_df_ram = pd.concat(out_ram)[columns_order]
            all_df_ram = all_df_ram.sort_values("Wall_time")
            all_df_ram = all_df_ram.reset_index(drop=True)
            all_df_ram = all_df_ram.rolling(self.smoothing_window).mean(numeric_only=True)
            plotted_ram = all_df_ram.plot(x="Step", y="Value", title="VRAM Usage")

            ram_img = os.path.join(model_config.model_dir, "logging", f"ram_plot_{model_config.revision}.png")
            print(f"Saving {ram_img}")
            plotted_ram.figure.savefig(ram_img)
            matplotlib.pyplot.close(plotted_ram.figure)
            # FIX: append the opened image, not the path string, so the
            # returned list is homogeneous (PIL images) like the other plots.
            out_images.append(Image.open(ram_img))
            out_names.append("VRAM Usage")
        except Exception:
            # VRAM events may be absent entirely; this plot is best-effort.
            pass
        # FIX: join for the UI even when the VRAM plot failed — this used to
        # sit inside the try block above and could be silently skipped.
        if for_ui:
            out_names = "<br>".join(out_names)

        del out_loss
        del out_lr
        del out_ram
        try:
            matplotlib.pyplot.close()
        except Exception:
            pass
        print("Cleanup log parse.")
        return out_images, out_names