from enum import IntEnum
import json
from multiprocessing import Pool
import pathlib

import numpy as np
import pandas as pd
|
BRANCH_ARTIFACTS_DIR = (
    pathlib.Path(__file__).parent.resolve()
    / "googleapiclient"
    / "discovery_cache"
    / "documents"
)
MAIN_ARTIFACTS_DIR = (
    pathlib.Path(__file__).parent.resolve()
    / ".."
    / "main"
    / "googleapiclient"
    / "discovery_cache"
    / "documents"
)
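
# Tuning knobs for the multiprocessing pool in detect_discovery_changes():
# the number of worker processes, and how many files each worker receives per
# batch (passed to `Pool.map` as its `chunksize` argument).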
MULTIPROCESSING_NUM_PER_BATCH = 5
MULTIPROCESSING_NUM_AGENTS = 10
|
|
class ChangeType(IntEnum):
    UNKNOWN = 0
    DELETED = 1
    ADDED = 2
    CHANGED = 3


class DirectoryDoesNotExist(ValueError):
    """Raised when the specified directory does not exist."""

    pass
|
|
class ChangeSummary:
    """Represents the change summary between two directories containing
    artifacts.
    """

    def __init__(self, new_artifacts_dir, current_artifacts_dir, temp_dir, file_list):
        """Initializes an instance of a ChangeSummary.

        Args:
            new_artifacts_dir (str): The relative path to the directory with the
                new discovery artifacts.
            current_artifacts_dir (str): The relative path to the directory with
                the current discovery artifacts.
            temp_dir (str): The relative path to the directory used for
                temporary storage where intermediate files will be stored.
            file_list (list): A list of strings containing files to analyze.
        """
        self._file_list = file_list
        self._new_artifacts_dir = pathlib.Path(new_artifacts_dir)
        self._current_artifacts_dir = pathlib.Path(current_artifacts_dir)
        self._temp_dir = pathlib.Path(temp_dir)
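
        # Fail fast if any of the three directories is missing.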
        self._raise_if_directory_not_found(self._new_artifacts_dir)
        self._raise_if_directory_not_found(self._current_artifacts_dir)
        self._raise_if_directory_not_found(self._temp_dir)
|
    def _raise_if_directory_not_found(self, directory):
        """Raises DirectoryDoesNotExist if the given directory doesn't exist.

        Args:
            directory (str): The relative path to the directory.
        """
        if not pathlib.Path(directory).exists():
            raise DirectoryDoesNotExist(
                "Directory does not exist: {0}".format(directory)
            )
|
    def _load_json_to_dataframe(self, file_path):
        """Returns a pandas dataframe created by flattening the given JSON
        file with `pd.json_normalize`, or an empty dataframe if the file
        does not exist.

        Args:
            file_path (str): The relative path to the discovery artifact to
                parse.
        """
        dataframe_doc = pd.DataFrame()

        if pathlib.Path(file_path).is_file():
            with open(file_path, "r") as f:
                dataframe_doc = pd.json_normalize(json.load(f))
        return dataframe_doc
|
    def _get_discovery_differences(self, filename):
        """Returns a pandas dataframe which contains the differences between
        the current and new discovery artifact directories, corresponding to
        the file name provided.

        Args:
            filename (str): The name of the discovery artifact to parse.
        """
        current_artifact_path = self._current_artifacts_dir / filename
        new_artifact_path = self._new_artifacts_dir / filename
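
        # Load both versions of the artifact; a missing file yields an empty
        # dataframe.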
        current_doc = self._load_json_to_dataframe(current_artifact_path)
        new_doc = self._load_json_to_dataframe(new_artifact_path)
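
        # Concatenate the two single-row frames and transpose them so that
        # each row holds one dot-delimited key with "CurrentValue" and
        # "NewValue" columns.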
        combined_docs = (
            pd.concat([current_doc, new_doc], keys=["CurrentValue", "NewValue"])
            .reset_index(drop=True, level=1)
            .rename_axis(["Key"], axis=1)
            .transpose()
            .reset_index()
        )
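
        # If either document was missing entirely, make sure both value
        # columns still exist so the comparisons below don't fail.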
        if "CurrentValue" not in combined_docs.columns:
            combined_docs["CurrentValue"] = np.nan
        if "NewValue" not in combined_docs.columns:
            combined_docs["NewValue"] = np.nan
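
        # Split each dot-delimited key into a parent path and a leaf name,
        # e.g. "schemas.File.id" -> ("schemas.File", "id"). Keys without a
        # dot produce a single "Parent" column.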
        parent_child_df = combined_docs["Key"].str.rsplit(".", n=1, expand=True)
        if len(parent_child_df.columns) == 1:
            parent_child_df.columns = ["Parent"]
        else:
            parent_child_df.columns = ["Parent", "Child"]
        combined_docs = combined_docs.join(parent_child_df)
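
        # Flag a key as added when it has no current value, and as deleted
        # when it has no new value.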
        combined_docs["Added"] = np.where(
            combined_docs["CurrentValue"].isnull(), True, False
        )
        combined_docs["Deleted"] = np.where(
            combined_docs["NewValue"].isnull(), True, False
        )
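
        # For each parent path, compute the proportion of its keys flagged as
        # added, so that fully-added subtrees can later be collapsed into a
        # single row.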
        parent_added_agg = (
            combined_docs.groupby("Parent")
            .Added.value_counts(normalize=True)
            .reset_index(name="Proportion")
        )
        parent_added_agg["NumLevels"] = (
            parent_added_agg["Parent"].str.split(".").apply(len)
        )
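
        # Repeat the aggregation for deletions.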
        parent_deleted_agg = (
            combined_docs.groupby("Parent")
            .Deleted.value_counts(normalize=True)
            .reset_index(name="Proportion")
        )
        parent_deleted_agg["NumLevels"] = (
            parent_deleted_agg["Parent"].str.split(".").apply(len)
        )
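
        # Parents whose keys were all added (or all deleted), ordered from
        # the shallowest path to the deepest.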
        all_added = (
            parent_added_agg[
                (parent_added_agg["Proportion"] == 1)
                & (parent_added_agg["Added"] == True)
            ][["Parent", "NumLevels"]]
            .sort_values("NumLevels", ascending=True)
            .Parent.to_list()
        )
        all_deleted = (
            parent_deleted_agg[
                (parent_deleted_agg["Proportion"] == 1)
                & (parent_deleted_agg["Deleted"] == True)
            ][["Parent", "NumLevels"]]
            .sort_values("NumLevels", ascending=True)
            .Parent.to_list()
        )
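
        # Collapse fully added or deleted subtrees: relabel every key whose
        # parent path starts with such a parent to the parent itself. Because
        # the parents are ordered shallowest-first, the shallowest enclosing
        # subtree wins.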
        for word in all_added:
            combined_docs.Parent = np.where(
                combined_docs["Parent"].str.startswith(word), word, combined_docs.Parent
            )
        for word in all_deleted:
            combined_docs.Parent = np.where(
                combined_docs["Parent"].str.startswith(word), word, combined_docs.Parent
            )
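
        # Keep only the rows where the current and new values differ.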
        docs_diff = combined_docs[
            combined_docs["CurrentValue"] != combined_docs["NewValue"]
        ].copy(deep=False)
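
        # Discovery artifacts are named `<api>.<version>.json`, so the first
        # dot-separated component is the API name and the remainder, minus
        # the `.json` suffix, is the version.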
        api_version_string = filename.split(".")[:-1]
        docs_diff["Name"] = api_version_string[0]
        docs_diff["Version"] = ".".join(api_version_string[1:])
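
        # Classify each difference: a missing new value is a deletion, a
        # missing current value is an addition, and anything else is an
        # in-place change.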
        deleted_condition = docs_diff["NewValue"].isnull()
        added_condition = docs_diff["CurrentValue"].isnull()

        docs_diff["ChangeType"] = np.where(
            deleted_condition,
            ChangeType.DELETED,
            np.where(added_condition, ChangeType.ADDED, ChangeType.CHANGED),
        )
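
        # Drop keys that rarely affect functionality, such as descriptions
        # and etags.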
        docs_diff = docs_diff[
            ~docs_diff["Key"].str.contains(
                "|".join(self._get_keys_to_ignore()), case=False
            )
        ]
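
        # Count how many keys changed under each parent so that related
        # changes can be reported as one line with a count.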
        docs_diff_with_count = (
            docs_diff.groupby(
                ["Parent", "Added", "Deleted", "Name", "Version", "ChangeType"]
            )
            .size()
            .reset_index(name="Count")
        )
        docs_diff = docs_diff.merge(docs_diff_with_count)
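
        # When more than one key changed under the same parent, report the
        # parent path instead of each individual key.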
        docs_diff.loc[docs_diff["Count"] > 1, "Key"] = docs_diff["Parent"]

        return docs_diff[
            ["Key", "Added", "Deleted", "Name", "Version", "ChangeType", "Count"]
        ].drop_duplicates()
|
    def _build_summary_message(self, api_name, is_feature):
        """Returns a string containing the commit summary for a given api.
        The string returned will be `fix(<api_name>): update the api` when
        `is_feature=False` and `feat(<api_name>): update the api` when
        `is_feature=True`.

        Args:
            api_name (str): The name of the api to include in the summary.
            is_feature (bool): If True, use the prefix `feat`, otherwise
                use `fix`.
        """
        commit_type = "feat" if is_feature else "fix"
        return "{0}({1}): update the api".format(commit_type, api_name)
|
    def _get_keys_to_ignore(self):
        """Returns a list of strings with keys to ignore because they rarely
        affect functionality.

        Args: None
        """
        keys_to_ignore = [
            "description",
            "documentation",
            "enum",
            "etag",
            "revision",
            "title",
            "url",
            "rootUrl",
        ]
        return keys_to_ignore
|
    def _get_stable_versions(self, versions):
        """Returns a pandas series `pd.Series()` of boolean values,
        corresponding to the given series, indicating whether each version is
        considered stable or not. A version is considered stable when it ends
        in dotted numeric components such as `v1`, `v1.2` or `v1.2.3`;
        pre-release suffixes such as `v1beta1` do not match the pattern.

        Args:
            versions (object): A pandas series containing version
                information for all discovery artifacts.
        """
        return versions.str.extract(r"(v\d?\.?\d?\.?\d+$)").notnull()
|
    def _get_summary_and_write_to_disk(self, dataframe, directory):
        """Writes summary information to file about changes made to discovery
        artifacts based on the provided dataframe and returns a dataframe
        with the same information. The file `allapis.dataframe` is saved to
        the directory provided.

        Args:
            dataframe (object): A pandas dataframe containing summary change
                information for all discovery artifacts.
            directory (str): Path where the summary file should be saved.
        """
        dataframe["IsStable"] = self._get_stable_versions(dataframe["Version"])
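
        # Only additions and deletions count as features; in-place changes
        # are treated as fixes.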
        filter_features = (dataframe["ChangeType"] == ChangeType.DELETED) | (
            dataframe["ChangeType"] == ChangeType.ADDED
        )
        dataframe["IsFeature"] = np.where(filter_features, True, np.nan)
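
        # An API is a feature if any row for that API is a feature.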
        dataframe["IsFeatureAggregate"] = dataframe.groupby("Name").IsFeature.transform(
            lambda x: x.any()
        )
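
        # Build the conventional-commit style summary line for every row.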
        dataframe["Summary"] = np.vectorize(self._build_summary_message)(
            dataframe["Name"], dataframe["IsFeatureAggregate"]
        )

        dataframe.to_csv(directory / "allapis.dataframe")
        return dataframe
|
    def _write_verbose_changes_to_disk(self, dataframe, directory, summary_df):
        """Writes verbose information to file about changes made to discovery
        artifacts based on the provided dataframe. A separate file with the
        extension `.verbose` is saved for each api in the directory provided.

        Args:
            dataframe (object): A pandas dataframe containing verbose change
                information for all discovery artifacts.
            directory (str): Path where the verbose files should be saved.
            summary_df (object): A dataframe containing a summary of the
                changes.
        """
        verbose_changes = []

        dataframe.sort_values(
            by=["Name", "Version", "ChangeType"], ascending=True, inplace=True
        )
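
        # Walk the changes in (Name, Version, ChangeType) order so each
        # verbose file reads as one section per API version and change type.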
        change_type_groups = dataframe[
            ["Name", "Version", "ChangeType", "Key", "Count"]
        ].groupby(["Name", "Version", "ChangeType"])

        lastApi = ""
        lastVersion = ""
        lastType = ChangeType.UNKNOWN
        f = None

        for name, group in change_type_groups:
            currentApi = name[0]
            currentVersion = name[1]
            currentType = name[2]
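
            # When a new API starts, flush the buffered lines for the
            # previous API and open a fresh `.verbose` file.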
            if lastApi != currentApi:
                if f is not None:
                    f.writelines(verbose_changes)
                    f.close()
                    f = None

                verbose_changes = []
                lastVersion = ""

                filename = "{0}.verbose".format(currentApi)
                f = open(pathlib.Path(directory / filename), "a")
                lastApi = currentApi
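
                # Each API's file begins with its one-line commit summary.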
                current_api_filter = summary_df["Name"] == currentApi
                verbose_changes.append(summary_df[current_api_filter].Summary.iloc[0])
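
            # Start a new section whenever the version changes.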
            if lastVersion != currentVersion:
                verbose_changes.append(
                    "\n\n#### {0}:{1}\n\n".format(currentApi, currentVersion)
                )
                lastVersion = currentVersion
                lastType = ChangeType.UNKNOWN
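
            # Add a heading for the current change type within the section.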
            if currentType != lastType:
                if currentType == ChangeType.DELETED:
                    verbose_changes.append("\nThe following keys were deleted:\n")
                elif currentType == ChangeType.ADDED:
                    verbose_changes.append("\nThe following keys were added:\n")
                else:
                    verbose_changes.append("\nThe following keys were changed:\n")

                lastType = currentType
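
            # List every key in the group along with the number of child keys
            # it covers.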
            verbose_changes.extend(
                [
                    "- {0} (Total Keys: {1})\n".format(row["Key"], row["Count"])
                    for index, row in group[["Key", "Count"]].iterrows()
                ]
            )
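
        # Flush the buffered lines for the final API.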
        if f is not None:
            f.writelines(verbose_changes)
            f.close()
            f = None
|
    def detect_discovery_changes(self):
        """Writes a summary of the changes to the discovery artifacts to disk
        at the path specified in `temp_dir`.

        Args: None
        """
        result = pd.DataFrame()
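
        # Fan the per-file diff computation out across a pool of worker
        # processes and concatenate the per-file dataframes.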
        with Pool(processes=MULTIPROCESSING_NUM_AGENTS) as pool:
            if len(self._file_list):
                result = pd.concat(
                    pool.map(
                        self._get_discovery_differences,
                        self._file_list,
                        MULTIPROCESSING_NUM_PER_BATCH,
                    )
                )
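
        # Nothing to write if no differences were found.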
        if len(result):
            sort_columns = ["Name", "Version", "ChangeType", "Key"]
            result.sort_values(by=sort_columns, ascending=True, inplace=True)
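
            # Create the temp directory if it doesn't already exist.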
            pathlib.Path(self._temp_dir).mkdir(exist_ok=True)
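
            # Write the summary first so the verbose writer can reuse each
            # API's summary line at the top of its file.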
            summary_df = self._get_summary_and_write_to_disk(result, self._temp_dir)
            self._write_verbose_changes_to_disk(result, self._temp_dir, summary_df)
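

# A minimal usage sketch (not part of the original module): the directory
# constants above are this repository's defaults, while the file list and the
# `temp` scratch directory below are hypothetical examples. Note the scratch
# directory must already exist, because the constructor validates all three
# paths.
if __name__ == "__main__":
    ChangeSummary(
        new_artifacts_dir=BRANCH_ARTIFACTS_DIR,
        current_artifacts_dir=MAIN_ARTIFACTS_DIR,
        temp_dir="temp",
        file_list=["drive.v3.json"],
    ).detect_discovery_changes()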
|
|