import os
import sys
import shutil
from subprocess import call
from annogesiclib.multiparser import Multiparser
from annogesiclib.helper import Helper
from annogesiclib.gff3 import Gff3Parser
from annogesiclib.get_Rfam_ribo import rbs_from_rfam
from annogesiclib.extract_RBS import extract_potential_rbs
from annogesiclib.recompute_RBS import regenerate_seq, reextract_rbs
from annogesiclib.ribo_gff import stat_and_covert2gff
from annogesiclib.modify_rbs_table import modify_table
from annogesiclib.map_ribos import mapping_ribos
from annogesiclib.rbs_overlap import rbs_overlap
class Ribos(object):
'''detection of riboswitches and RNA thermometers'''
def __init__(self, args_ribo):
self.multiparser = Multiparser()
self.helper = Helper()
self.gff_parser = Gff3Parser()
self.gff_path = os.path.join(args_ribo.gffs, "tmp")
if args_ribo.tsss is not None:
self.tss_path = os.path.join(args_ribo.tsss, "tmp")
else:
self.tss_path = None
self.tran_path = os.path.join(args_ribo.trans, "tmp")
self.fasta_path = os.path.join(args_ribo.fastas, "tmp")
if (args_ribo.program == "both") or (
args_ribo.program == "riboswitch"):
(self.ribos_stat_folder, self.ribos_gff_outfolder,
self.ribos_table_folder, self.ribos_scan_folder,
self.ribos_tmp_files, self.ribos_rfam,
self.ribos_suffixs) = self._create_out_folders(
args_ribo.ribos_out_folder, "riboswitch",
args_ribo.database)
if (args_ribo.program == "both") or (
args_ribo.program == "thermometer"):
(self.thermo_stat_folder, self.thermo_gff_outfolder,
self.thermo_table_folder, self.thermo_scan_folder,
self.thermo_tmp_files, self.thermo_rfam,
self.thermo_suffixs) = self._create_out_folders(
args_ribo.thermo_out_folder, "RNA_thermometer",
args_ribo.database)
def _create_out_folders(self, out_folder, feature, database):
stat_folder = os.path.join(out_folder, "statistics")
gff_outfolder = os.path.join(out_folder, "gffs")
table_folder = os.path.join(out_folder, "tables")
scan_folder = os.path.join(out_folder, "scan_Rfam_results")
tmp_files = {"fasta": os.path.join(
out_folder, "tmp_fasta"),
"scan": os.path.join(
out_folder, "tmp_scan"),
"table": os.path.join(
out_folder, "tmp_table")}
rfam = os.path.join(database, "Rfam_" + feature + ".cm")
suffixs = {"csv": feature + ".csv",
"txt": feature + "_prescan.txt",
"re_txt": feature + "_scan.txt",
"re_csv": feature + "_scan.csv"}
return (stat_folder, gff_outfolder, table_folder, scan_folder,
tmp_files, rfam, suffixs)
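# Note: _run_cmscan below builds an Infernal "cmscan" call against the Rfam
# covariance-model file. Judging from how args_ribo.cutoff is parsed, the cutoff
# string is expected to be "e_<value>" (inclusion e-value threshold, passed via
# --incE) or "s_<value>" (inclusion bit-score threshold, passed via --incT);
# for example, a hypothetical "e_0.001" would run:
# cmscan --incE 0.001 --acc <Rfam cm file> <candidate fasta>.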
def _run_cmscan(self, args_ribo, seq, type_, prefix, tmp_files,
suffixs, rfam, log):
scan_file = os.path.join(tmp_files["scan"],
"_".join([prefix, suffixs[type_]]))
scan = open(scan_file, "w")
if args_ribo.cutoff.split("_")[0] == "e":
value = args_ribo.cutoff.split("_")[-1]
log.write(" ".join([args_ribo.cmscan_path, "--incE",
value, "--acc", rfam, seq]) + "\n")
call([args_ribo.cmscan_path, "--incE",
value, "--acc", rfam, seq], stdout=scan)
elif args_ribo.cutoff.split("_")[0] == "s":
value = args_ribo.cutoff.split("_")[-1]
log.write(" ".join([args_ribo.cmscan_path, "--incT",
value, "--acc", rfam, seq]) + "\n")
call([args_ribo.cmscan_path, "--incT",
value, "--acc", rfam, seq], stdout=scan)
else:
print("Error: the --cutoff needs to start from 'e' "
"(e value) or 's' (score)!")
log.write("the --cutoff needs to start from 'e' "
"(e value) or 's' (score).\n")
sys.exit()
scan.close()
log.write("Done!\n")
log.write("\t" + scan_file + " is temporary generated.\n")
return scan_file
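# Note: _scan_extract_rfam below implements a two-pass scan. Candidate sequences
# are first extracted with extract_potential_rbs and pre-scanned with cmscan;
# regenerate_seq then rebuilds a reduced sequence set from the pre-scan hits,
# which is scanned a second time, and reextract_rbs/modify_table convert that
# second scan into the final candidate table.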
def _scan_extract_rfam(self, prefixs, args_ribo, tmp_files, suffixs,
feature, rfam, log):
'''extract the sequences of the candidates and scan them'''
for gff in os.listdir(self.gff_path):
if gff.endswith(".gff"):
prefix = gff.replace(".gff", "")
tran_file = os.path.join(self.tran_path, prefix + "_transcript.gff")
first_seq = os.path.join(tmp_files["fasta"], prefix + ".fa")
if (os.path.exists(tran_file)):
first_seq = os.path.join(tmp_files["fasta"], prefix + ".fa")
print("Extracting sequences of candidates for {0}".format(
prefix))
if self.tss_path is not None:
tss_file = os.path.join(self.tss_path, prefix + "_TSS.gff")
else:
tss_file = None
log.write("Running extract_RBS.py to extract potential "
"sequences of riboswitches/RNA thermometers for "
"{0}.\n".format(prefix))
extract_potential_rbs(
os.path.join(self.fasta_path, prefix + ".fa"),
os.path.join(self.gff_path, gff), tss_file,
tran_file, first_seq, args_ribo, feature)
log.write("\t" + first_seq + " is temporary generated.\n")
print("Pre-scanning of {0}".format(prefix))
log.write("Using Infernal to pre-scan riboswitches/RNA "
"thermometers for {0}.\n".format(prefix))
log.write("Please make sure the version of Infernal is at least 1.1.1.\n")
if (os.stat(first_seq).st_size != 0):
prefixs.append(prefix)
first_scan_file = self._run_cmscan(
args_ribo, first_seq, "txt", prefix, tmp_files,
suffixs, rfam, log)
sec_seq = os.path.join(tmp_files["fasta"],
"_".join([prefix, "regenerate.fa"]))
first_table = os.path.join(
tmp_files["table"],
"_".join([prefix, suffixs["csv"]]))
log.write("Running recompute_RBS.py to update the "
"potential sequences of riboswitches/RNA "
"thermometers for {0} based on the "
"pre-scanning results.\n".format(prefix))
regenerate_seq(first_scan_file, first_seq,
first_table, sec_seq)
log.write("\t" + sec_seq + " is temporary generated.\n")
print("Scanning of {0}".format(prefix))
log.write("Using Infernal to scan riboswitches/RNA "
"thermometers for {0}.\n".format(prefix))
log.write("Please make sure the version of Infernal "
"is at least 1.1.1.\n")
if (os.stat(sec_seq).st_size != 0):
sec_scan_file = self._run_cmscan(
args_ribo, sec_seq, "re_txt", prefix, tmp_files,
suffixs, rfam, log)
sec_table = os.path.join(
tmp_files["table"],
"_".join([prefix, suffixs["re_csv"]]))
log.write("Running recompute_RBS.py and modify_rbs_table.py "
"to generate tables for {0} "
"based on the scanning results.\n".format(prefix))
reextract_rbs(sec_scan_file, first_table, sec_table,
args_ribo.cutoff)
shutil.move(sec_table, first_table)
modify_table(first_table, args_ribo.output_all)
return prefixs
def _merge_results(self, args_ribo, scan_folder, suffixs, tmp_files,
table_folder, stat_folder, feature_id, gff_outfolder,
feature, log, prefixs):
'''merge the results of the two scanning runs'''
for gff in os.listdir(args_ribo.gffs):
if gff.endswith(".gff"):
prefix = gff.replace(".gff", "")
print("Merging results of {0}".format(prefix))
pre_strain = ""
self.helper.check_make_folder(os.path.join(
scan_folder, prefix))
fh = open(os.path.join(args_ribo.gffs, gff))
log.write("Merging the results from Infernal to generate "
"tables for {0}.\n".format(prefix))
for entry in self.gff_parser.entries(fh):
if (entry.seq_id != pre_strain) and (entry.seq_id in prefixs):
if len(pre_strain) == 0:
shutil.copyfile(os.path.join(
tmp_files["table"],
"_".join([entry.seq_id, suffixs["csv"]])),
os.path.join(
table_folder,
"_".join([prefix, suffixs["csv"]])))
else:
self.helper.merge_file(os.path.join(
tmp_files["table"],
"_".join([entry.seq_id, suffixs["csv"]])),
os.path.join(
table_folder,
"_".join([prefix, suffixs["csv"]])))
shutil.copy(os.path.join(
tmp_files["scan"],
"_".join([entry.seq_id, suffixs["txt"]])),
os.path.join(scan_folder, prefix))
sec_scan = os.path.join(
tmp_files["scan"],
"_".join([entry.seq_id, suffixs["re_txt"]]))
if os.path.exists(sec_scan):
shutil.copy(sec_scan,
os.path.join(scan_folder, prefix))
pre_strain = entry.seq_id
log.write("The following files are generated.\n")
for folder in (table_folder, scan_folder):
for file_ in os.listdir(folder):
log.write("\t" + os.path.join(folder, file_) + "\n")
out_stat = os.path.join(
stat_folder,
"_".join(["stat", prefix, feature + ".txt"]))
print("Computing statistics of {0}".format(prefix))
log.write("Running ribo_gff.py to do statistics and generate "
"gff files for {0}.\n".format(prefix))
log.write("The following files are generated:\n")
out_gff = os.path.join(gff_outfolder, "_".join([
prefix, feature + ".gff"]))
stat_and_covert2gff(os.path.join(
table_folder, "_".join([prefix, suffixs["csv"]])),
feature_id, out_gff,
args_ribo.fuzzy, out_stat, feature)
log.write("\t" + out_gff + "\n")
log.write("\t" + out_stat + "\n")
fh.close()
def _remove_tmp(self, args_ribo):
self.helper.remove_tmp_dir(args_ribo.gffs)
self.helper.remove_tmp_dir(args_ribo.fastas)
self.helper.remove_tmp_dir(args_ribo.trans)
self.helper.remove_tmp_dir(args_ribo.tsss)
def _remove_overlap(self, gff_path, tmp_files, suffixs, type_, fuzzy,
log, prefixs):
log.write("Running rbs_overlap.py to remove the overlapping "
"riboswitches/RNA thermometers.\n")
for gff in os.listdir(gff_path):
if gff.endswith(".gff") and (gff.replace(".gff", "") in prefixs):
tmp_table = os.path.join(os.path.join(
tmp_files["table"], "_".join([
gff.replace(".gff", ""), suffixs["csv"]])))
rbs_overlap(tmp_table,
os.path.join(gff_path, gff), type_, fuzzy)
log.write("\t" + tmp_table + " is updated.\n")
def _core_prediction(self, args_ribo, feature_id, rfam, tmp_files,
table_folder, feature, scan_folder, suffixs,
stat_folder, gff_outfolder, out_folder, type_, log):
'''main part of detection'''
log.write("Running get_Rfam_ribo.py to get the information of "
"riboswitches/RNA thermometers from Rfam.\n")
rbs_from_rfam(feature_id, args_ribo.rfam, rfam)
log.write("Using Infernal to compress the Rfam data of "
"riboswitches/RNA thermometers.\n")
log.write("Please make sure the version of Infernal is at least 1.1.1.\n")
print("Compressing Rfam of " + feature)
log.write(" ".join([args_ribo.cmpress_path, "-F", rfam]) + "\n")
call([args_ribo.cmpress_path, "-F", rfam])
log.write("Done!\n")
prefixs = []
self.helper.check_make_folder(tmp_files["fasta"])
self.helper.check_make_folder(tmp_files["scan"])
self.helper.check_make_folder(tmp_files["table"])
prefixs = self._scan_extract_rfam(
prefixs, args_ribo, tmp_files, suffixs, feature, rfam, log)
self._remove_overlap(self.gff_path, tmp_files, suffixs, type_,
args_ribo.fuzzy, log, prefixs)
self._merge_results(args_ribo, scan_folder, suffixs, tmp_files,
table_folder, stat_folder, feature_id,
gff_outfolder, feature, log, prefixs)
log.write("Running map_ribos.py to extract all the details from Rfam.\n")
mapping_ribos(table_folder, feature_id, feature)
log.write("The following files are updated:\n")
for file_ in os.listdir(table_folder):
log.write("\t" + os.path.join(table_folder, file_) + "\n")
self.helper.remove_all_content(out_folder, "tmp", "dir")
def run_ribos(self, args_ribo, log_t, log_r):
if args_ribo.fuzzy_rbs > 6:
if log_t is not None:
log_t.write("--fuzzy_rbs should be equal or less than 6!\n")
if log_r is not None:
log_r.write("--fuzzy_rbs should be equal or less than 6!\n")
print("Error: --fuzzy_rbs should be equal or less than 6!")
sys.exit()
self.multiparser.parser_gff(args_ribo.gffs, None)
self.multiparser.parser_fasta(args_ribo.fastas)
self.multiparser.parser_gff(args_ribo.trans, "transcript")
if args_ribo.tsss is not None:
self.multiparser.parser_gff(args_ribo.tsss, "TSS")
for gff in os.listdir(args_ribo.gffs):
if gff.endswith(".gff"):
self.helper.check_uni_attributes(os.path.join(
args_ribo.gffs, gff))
if (args_ribo.program.lower() == "both") or (
args_ribo.program.lower() == "riboswitch"):
print("Detecting riboswtiches now")
self._core_prediction(
args_ribo, args_ribo.ribos_id, self.ribos_rfam,
self.ribos_tmp_files, self.ribos_table_folder,
"riboswitch", self.ribos_scan_folder, self.ribos_suffixs,
self.ribos_stat_folder, self.ribos_gff_outfolder,
args_ribo.ribos_out_folder, "riboswitch", log_r)
if (args_ribo.program.lower() == "both") or (
args_ribo.program.lower() == "thermometer"):
print("Detecting RNA thermometers now")
self._core_prediction(
args_ribo, args_ribo.thermo_id, self.thermo_rfam,
self.thermo_tmp_files, self.thermo_table_folder,
"RNA_thermometer", self.thermo_scan_folder,
self.thermo_suffixs, self.thermo_stat_folder,
self.thermo_gff_outfolder, args_ribo.thermo_out_folder,
"thermometer", log_t)
self._remove_tmp(args_ribo)

# [end of file: annogesiclib/ribos.py | package: ANNOgesic]
import os
import sys
import shutil
from annogesiclib.multiparser import Multiparser
from annogesiclib.helper import Helper
from annogesiclib.sORF_intergenic import get_intergenic
from annogesiclib.sORF_detection import sorf_detection
from annogesiclib.stat_sorf import stat
from annogesiclib.reorganize_table import reorganize_table
class sORFDetection(object):
'''detection of sORFs'''
def __init__(self, args_sorf):
self.multiparser = Multiparser()
self.helper = Helper()
if args_sorf.tsss is not None:
self.tss_path = os.path.join(args_sorf.tsss, "tmp")
else:
self.tss_path = None
if args_sorf.srnas is not None:
self.srna_path = os.path.join(args_sorf.srnas, "tmp")
else:
self.srna_path = None
self.gff_output = os.path.join(args_sorf.out_folder, "gffs")
self.table_output = os.path.join(args_sorf.out_folder, "tables")
self.tran_path = os.path.join(args_sorf.trans, "tmp")
self.fasta_path = os.path.join(args_sorf.fastas, "tmp")
self.all_cand = "all_candidates"
self.best = "best_candidates"
def _check_gff(self, gffs):
for gff in os.listdir(gffs):
if gff.endswith(".gff"):
self.helper.check_uni_attributes(os.path.join(gffs, gff))
def _check_necessary_files(self, args_sorf, log):
if (args_sorf.gffs is None) or (args_sorf.trans is None) or (
(args_sorf.tex_wigs is None) and (args_sorf.frag_wigs is None)):
print("Error: lack required files!")
log.write("genome annotation, transcript file or wiggle files "
"are not assigned.\n")
sys.exit()
if args_sorf.utr_detect:
if (args_sorf.tsss is None):
print("Error: TSS files are required for UTR derived"
" sORF detection!")
log.write("TSS files are required for UTR derived"
" sORF detection!\n")
sys.exit()
self._check_gff(args_sorf.gffs)
self.multiparser.parser_gff(args_sorf.gffs, None)
if args_sorf.tsss is not None:
self._check_gff(args_sorf.tsss)
self.multiparser.parser_gff(args_sorf.tsss, "TSS")
self.multiparser.combine_gff(args_sorf.gffs, self.tss_path,
None, "TSS")
self._check_gff(args_sorf.trans)
if args_sorf.srnas is not None:
self._check_gff(args_sorf.srnas)
self.multiparser.parser_gff(args_sorf.srnas, "sRNA")
self.multiparser.combine_gff(args_sorf.gffs, self.srna_path,
None, "sRNA")
def _start_stop_codon(self, prefixs, args_sorf, log):
'''detect sORFs based on start and stop codons
and ribosome binding sites'''
log.write("Running sORF_detection.py for detecting sORFs.\n")
log.write("The following files are generated:\n")
for prefix in prefixs:
print("Searching sORFs of {0}".format(prefix))
if self.srna_path is not None:
srna_file = os.path.join(self.srna_path,
"_".join([prefix, "sRNA.gff"]))
else:
srna_file = None
if self.tss_path is not None:
tss_file = os.path.join(self.tss_path,
"_".join([prefix, "TSS.gff"]))
else:
tss_file = None
sorf_detection(os.path.join(self.fasta_path, prefix + ".fa"),
srna_file, os.path.join(args_sorf.out_folder,
"_".join([prefix, "inter.gff"])), tss_file,
os.path.join(args_sorf.wig_path,
"_".join([prefix, "forward.wig"])),
os.path.join(args_sorf.wig_path,
"_".join([prefix, "reverse.wig"])),
os.path.join(self.gff_output, self.all_cand,
"_".join([prefix, "sORF"])), args_sorf)
if "_".join([prefix, "sORF_all.gff"]) in os.listdir(
os.path.join(self.gff_output, self.all_cand)):
gff_all = os.path.join(self.gff_output, self.all_cand,
"_".join([prefix, "sORF.gff"]))
gff_best = os.path.join(self.gff_output, self.best,
"_".join([prefix, "sORF.gff"]))
csv_all = os.path.join(self.table_output, self.all_cand,
"_".join([prefix, "sORF.csv"]))
csv_best = os.path.join(self.table_output, self.best,
"_".join([prefix, "sORF.csv"]))
shutil.move(os.path.join(self.gff_output, self.all_cand,
"_".join([prefix, "sORF_all.gff"])), gff_all)
shutil.move(os.path.join(self.gff_output, self.all_cand,
"_".join([prefix, "sORF_best.gff"])), gff_best)
shutil.move(os.path.join(self.gff_output, self.all_cand,
"_".join([prefix, "sORF_all.csv"])), csv_all)
shutil.move(os.path.join(self.gff_output, self.all_cand,
"_".join([prefix, "sORF_best.csv"])), csv_best)
log.write("\t" + gff_all + "\n")
log.write("\t" + gff_best + "\n")
log.write("\t" + csv_all + "\n")
log.write("\t" + csv_best + "\n")
def _remove_tmp(self, args_sorf):
self.helper.remove_all_content(args_sorf.out_folder, ".gff", "file")
self.helper.remove_tmp_dir(args_sorf.fastas)
self.helper.remove_tmp_dir(args_sorf.gffs)
self.helper.remove_tmp_dir(args_sorf.tsss)
self.helper.remove_tmp_dir(args_sorf.trans)
self.helper.remove_tmp_dir(args_sorf.srnas)
if "temp_wig" in os.listdir(args_sorf.out_folder):
shutil.rmtree(os.path.join(args_sorf.out_folder, "temp_wig"))
if "merge_wigs" in os.listdir(args_sorf.out_folder):
shutil.rmtree(os.path.join(args_sorf.out_folder, "merge_wigs"))
def _compare_tran_cds(self, args_sorf, log):
'''compare transcripts and CDSs to find the intergenic regions'''
prefixs = []
log.write("Running sORF_intergenic.py to extract the sequences of "
"potential sORFs\n")
for gff in os.listdir(args_sorf.gffs):
if gff.endswith(".gff"):
prefix = gff.replace(".gff", "")
prefixs.append(prefix)
print("Comparing transcripts and CDSs of {0}".format(prefix))
get_intergenic(os.path.join(args_sorf.gffs, gff),
os.path.join(self.tran_path,
"_".join([prefix, "transcript.gff"])),
os.path.join(args_sorf.out_folder,
"_".join([prefix, "inter.gff"])),
args_sorf.utr_detect, args_sorf.hypo,
args_sorf.extend_5, args_sorf.extend_3)
log.write("\t" + os.path.join(args_sorf.out_folder,
"_".join([prefix, "inter.gff"])) +
" is generated to temporary store the sequences.\n")
return prefixs
def _re_table(self, args_sorf, prefixs, log):
log.write("Running re_table.py for generating coverage information.\n")
log.write("The following files are updated:\n")
for type_ in ["all_candidates", "best_candidates"]:
for prefix in prefixs:
table_file = os.path.join(args_sorf.out_folder, "tables",
type_, "_".join([
prefix, "sORF.csv"]))
reorganize_table(args_sorf.libs, args_sorf.merge_wigs,
"Track_detail", table_file)
log.write("\t" + table_file + "\n")
def run_sorf_detection(self, args_sorf, log):
if args_sorf.fuzzy_rbs > 6:
log.write("--fuzzy_rbs should be equal or less than 6!\n")
print("Error: --fuzzy_rbs should be equal or less than 6!")
sys.exit()
self._check_necessary_files(args_sorf, log)
self.multiparser.parser_gff(args_sorf.trans, "transcript")
self.multiparser.combine_gff(args_sorf.gffs, self.tran_path,
None, "transcript")
self.multiparser.parser_fasta(args_sorf.fastas)
self.multiparser.combine_fasta(args_sorf.gffs, self.fasta_path, None)
prefixs = self._compare_tran_cds(args_sorf, log)
self._start_stop_codon(prefixs, args_sorf, log)
log.write("Running stat_sorf.py to do statistics.\n")
for sorf in os.listdir(os.path.join(self.gff_output, self.all_cand)):
print("Running statistics of {0}".format(sorf))
if sorf.endswith("_sORF.gff"):
stat_file = os.path.join(args_sorf.out_folder, "statistics",
"_".join(["stat", sorf.replace(".gff", ".csv")]))
stat(os.path.join(self.gff_output, self.all_cand, sorf),
os.path.join(self.gff_output, self.best, sorf), stat_file,
args_sorf.utr_detect)
log.write("\t" + stat_file + " is generated.\n")
self._re_table(args_sorf, prefixs, log)
self._remove_tmp(args_sorf)

# [end of file: annogesiclib/sorf.py | package: ANNOgesic]
import os
import networkx as nx
import matplotlib
matplotlib.use('Agg')
from matplotlib.offsetbox import AnchoredText
import matplotlib.pyplot as plt
plt.rcParams['image.cmap'] = 'RdBu_r'
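# Note: in the plotting helpers below, the queried protein (the "center" locus
# tag or gene name) is colored '#FFFF66' and all interaction partners '#CCFFCC'.
# Short node names (5 characters or fewer) are kept in labels1 and drawn with a
# larger font, while longer names go into labels2 and have their underscores
# replaced by newlines (modify_label) before being drawn with a smaller font.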
def node(item, nodes, center, colors, labels1, labels2):
if item not in nodes:
nodes.append(item)
if len(item) > 5:
labels2[item] = item
labels1[item] = ""
else:
labels1[item] = item
labels2[item] = ""
if (center["locus_tag"] == item) or \
(center["gene_name"] == item):
colors[item] = '#FFFF66'
else:
colors[item] = '#CCFFCC'
def get_largest_compare(tick, score):
same = True
if (score >= 20) and tick >= 20:
pass
else:
if score != tick:
same = False
if score > tick:
if score >= 20:
tick = 20
else:
tick = score
return (tick, same)
def add_edge(G, ppi, style, weight, colorppi):
G.add_edge(ppi["item_a"], ppi["item_b"],
color=float(colorppi), style=style, weight=weight)
def add_node(G, nodes):
G.add_nodes_from(nodes)
def best_assign_attributes(check_na, G, ppi, pre_ppi, first, style):
check_na["best"] = True
if ppi["score"] == 0:
if ppi["below"] >= 20:
weight = 22
else:
weight = ppi["below"] + 1
else:
if ppi["score"] >= 20:
weight = 22
else:
weight = ppi["score"] + ppi["below"] + 1
add_edge(G, ppi, style, weight, ppi["best"])
if not first:
if pre_ppi["best"] != ppi["best"]:
check_na["same_best"] = True
def create_node(ppis, scores, nodes, center, colors, labels1, labels2, edges,
G, cutoff_score, check_na, pre_ppi):
first = True
for ppi in ppis:
scores.append(ppi["score"])
node(ppi["item_a"], nodes, center, colors, labels1, labels2)
node(ppi["item_b"], nodes, center, colors, labels1, labels2)
if ((ppi["item_a"], ppi["item_b"]) not in edges) or \
((ppi["item_b"], ppi["item_a"]) not in edges):
edges.append((ppi["item_a"], ppi["item_b"]))
if ppi["best"] == "NA":
add_edge(G, ppi, 'dashed', 1, -1)
elif float(ppi["best"]) <= cutoff_score:
best_assign_attributes(check_na, G, ppi,
pre_ppi, first, "dashdot")
else:
best_assign_attributes(check_na, G, ppi,
pre_ppi, first, "solid")
pre_ppi = ppi
first = False
add_node(G, nodes)
return pre_ppi
def modify_label(labels2, new_labels):
for key, value in labels2.items():
if "_" in value:
new_labels[key] = value.replace("_", "\n")
else:
new_labels[key] = value
def plot_text(check_na, plt, ppis, ppi, color_edge):
na = False
if check_na["na"]:
na = True
elif check_na["best"]:
if len(ppis) < 2:
na = True
else:
cbar = plt.colorbar(color_edge)
cbar.ax.tick_params(labelsize=16)
return na
def nx_node(G, pos, node_size, colors, color_list):
'''draw the node'''
nx.draw_networkx_nodes(G, pos, node_size=node_size, node_shape='o',
nodelist=colors.keys(), node_color=color_list,
linewidths=1)
def nx_edge(G, pos, edges, colors, styles, weights):
'''draw the edge'''
color_edge = (nx.draw_networkx_edges(G, pos, edgelist=edges,
edge_color=colors, style=styles, width=weights,
edge_vmin=-1, edge_vmax=1))
return color_edge
def nx_label(G, pos, labels, size):
'''set up the labels of the network'''
nx.draw_networkx_labels(G, pos, labels, font_size=size, font_weight='bold')
def nx_color_style(G, edges):
'''set up the colors of the network'''
colors = []
styles = []
check_na = True
for u, v in edges:
colors.append(G[u][v]['color'])
styles.append(G[u][v]['style'])
if (G[u][v]['style'] == "solid") or (
G[u][v]['style'] == "dashdot"):
check_na = False
return colors, styles, check_na
def print_title(plt, na, center):
if not na:
plt.title("|".join([center["locus_tag"],
" ".join([center["gene_name"],
"(based on the score of best literature)"])]),
fontsize="16")
else:
plt.title("|".join([center["locus_tag"],
" ".join([center["gene_name"],
"(based on the score of best literature)"])]) + \
"\n the numbers of supported literatures in all interactions are 0",
fontsize="16")
def plot(ppis, center, strain, cutoff_score, node_size, out_folder):
nodes = []
edges = []
labels1 = {}
labels2 = {}
colors = {}
check_na = {"number": False, "best": False, "na": False,
"same_number": False, "same_best": False}
pre_ppi = ""
scores = []
weights = []
plt.figure(figsize=(15, 15))
G = nx.Graph()
pre_ppi = create_node(ppis, scores, nodes, center, colors,
labels1, labels2, edges, G,
cutoff_score, check_na, pre_ppi)
pos = nx.spring_layout(G, k=2, scale=3, iterations=20)
color_list = []
for color in colors.values():
color_list.append(color)
nx_node(G, pos, node_size, colors, color_list)
connects = G.edges()
for weight in G.edges(data=True):
if weight[2]["weight"] <= 30:
weights.append(weight[2]["weight"])
else:
weights.append(30)
colors, styles, check_na["na"] = nx_color_style(G, connects)
color_edge = nx_edge(G, pos, connects, colors, styles, weights)
nx_label(G, pos, labels1, 12)
new_labels = {}
modify_label(labels2, new_labels)
nx_label(G, pos, new_labels, 10)
na = plot_text(check_na, plt, ppis, pre_ppi, color_edge)
print_title(plt, na, center)
plt.axis('off')
if strain not in os.listdir(out_folder):
os.mkdir(os.path.join(out_folder, strain))
plt.savefig(os.path.join(out_folder, strain,
"_".join([center["locus_tag"], center["gene_name"] + ".png"])),
bbox_inches="tight")
plt.clf()
plt.close('all')
return check_na
def score_compare(score, scores, cutoff_score, ppi):
'''check the number of publications which pass the cutoff'''
if score == "NA":
ppi["score"] = 0
ppi["below"] = 0
elif float(score) >= cutoff_score:
scores["score"] += 1
else:
scores["below"] += 1
def assign_score_below(pre_ppi, scores, ppis):
if "score" not in pre_ppi.keys():
pre_ppi["score"] = scores["score"]
if "below" not in pre_ppi.keys():
pre_ppi["below"] = scores["below"]
ppis.append(pre_ppi)
def get_best(pre_ppi, ppi, row):
'''get the best score of PPI'''
if "best" not in pre_ppi.keys():
ppi["best"] = row[8]
else:
if pre_ppi["best"] == "NA":
ppi["best"] = row[8]
else:
if float(pre_ppi["best"]) < float(row[8]):
ppi["best"] = row[8]
else:
ppi["best"] = pre_ppi["best"]
def interaction(first, pre_ppi, scores, ppis, match, center, cutoff_score,
node_size, out_folder):
'''check the interaction of two proteins'''
if first:
pass
else:
assign_score_below(pre_ppi, scores, ppis)
if match:
plot(ppis, center, pre_ppi["strain"], cutoff_score,
node_size, out_folder)
match = False
else:
print("No interacted partner with {0} | {1}".format(
center["locus_tag"], center["gene_name"]))
scores = {"score": 0, "below": 0}
ppis = []
first = True
return first, scores, match, ppis
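# Note: plot_ppi below parses a tab-separated PPI table. Judging from the column
# indexing, header rows start with "Interaction" and end with
# "<locus_tag> | <gene_name>", rows whose first column is "Genome" are skipped,
# and data rows carry the strain (column 0), the two interacting items
# (columns 1 and 2) and a literature score in column 8 ("NA" if unavailable).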
def plot_ppi(PPI_file, cutoff_score, out_folder, node_size):
'''plot the network of PPI'''
ppis = []
first = True
pre_ppi = None
scores = {"score": 0, "below": 0}
center = {}
start = False
match = False
with open(PPI_file) as fh:
for line in fh:
line = line.strip()
row = line.split("\t")
start = True
if row[0].startswith("Interaction"):
first, scores, match, ppis = interaction(
first, pre_ppi, scores, ppis, match, center,
cutoff_score, node_size, out_folder)
datas = row[0].split(" | ")
center["locus_tag"] = datas[0].split(" ")[-1]
center["gene_name"] = datas[-1]
print("Plotting {0}".format(center["gene_name"]))
elif row[0] == "Genome":
pass
else:
ppi = {"strain": row[0], "item_a": row[1], "item_b": row[2]}
if (ppi["item_a"] == center["locus_tag"]) or (
ppi["item_a"] == center["gene_name"]) or (
ppi["item_b"] == center["locus_tag"]) or (
ppi["item_b"] == center["gene_name"]):
match = True
if first:
first = False
score_compare(row[8], scores, cutoff_score, ppi)
ppi["best"] = row[8]
else:
if (ppi["strain"] == pre_ppi["strain"]) and (
ppi["item_a"] == pre_ppi["item_a"]) and (
ppi["item_b"] == pre_ppi["item_b"]):
get_best(pre_ppi, ppi, row)
score_compare(row[8], scores, cutoff_score, ppi)
else:
assign_score_below(pre_ppi, scores, ppis)
scores = {"score": 0, "below": 0}
score_compare(row[8], scores, cutoff_score, ppi)
ppi["best"] = row[8]
pre_ppi = ppi
if start and match:
assign_score_below(pre_ppi, scores, ppis)
plot(ppis, center, pre_ppi["strain"],
cutoff_score, node_size, out_folder)
elif not start:
print("No proper result can be retrieved in " + PPI_file)
elif not match:
print("No interacted partner with {0} | {1}".format(
center["locus_tag"], center["gene_name"])) | ANNOgesic | /ANNOgesic-1.1.14.linux-x86_64.tar.gz/usr/local/lib/python3.10/dist-packages/annogesiclib/plot_PPI.py | plot_PPI.py |
import os
import sys
import shutil
from subprocess import call, DEVNULL
from annogesiclib.multiparser import Multiparser
from annogesiclib.converter import Converter
from annogesiclib.format_fixer import FormatFixer
from annogesiclib.helper import Helper
class RATT(object):
'''annotation transfer'''
def __init__(self, args_ratt):
self.multiparser = Multiparser()
self.converter = Converter()
self.format_fixer = FormatFixer()
self.helper = Helper()
if args_ratt.ref_gbk:
self.gbk = os.path.join(args_ratt.ref_gbk, "gbk_tmp")
self.gbk_tmp = os.path.join(self.gbk, "tmp")
self.embl = os.path.join(args_ratt.ref_gbk, "embls")
if args_ratt.ref_embls:
self.embl = args_ratt.ref_embls
self.ratt_log = os.path.join(args_ratt.output_path, "ratt_log.txt")
self.tmp_files = {"tar": os.path.join(args_ratt.tar_fastas, "tmp"),
"ref": os.path.join(args_ratt.ref_fastas, "tmp"),
"out_gff": os.path.join(args_ratt.gff_outfolder,
"tmp"),
"gff": os.path.join(args_ratt.gff_outfolder,
"tmp.gff"),
"ptt": os.path.join(args_ratt.gff_outfolder,
"tmp.ptt"),
"rnt": os.path.join(args_ratt.gff_outfolder,
"tmp.rnt")}
def _convert_to_pttrnt(self, gffs, files, log):
for gff in files:
if gff.endswith(".gff"):
gff = os.path.join(gffs, gff)
filename = gff.split("/")
prefix = filename[-1][:-4]
rnt = gff[:-3] + "rnt"
ptt = gff[:-3] + "ptt"
fasta = self.helper.get_correct_file(self.tmp_files["tar"],
".fa", prefix, None, None)
if fasta:
self.converter.convert_gff2rntptt(gff, prefix, fasta, ptt, rnt,
None, None)
log.write("\t" + ptt + " is generated.\n")
log.write("\t" + rnt + " is generated.\n")
def _remove_files(self, args_ratt, out_gbk, log):
self.helper.remove_all_content(args_ratt.gff_outfolder, ".gff", "file")
self.helper.remove_all_content(args_ratt.gff_outfolder, ".ptt", "file")
self.helper.remove_all_content(args_ratt.gff_outfolder, ".rnt", "file")
log.write("Moving the final output files to {0}.\n".format(args_ratt.gff_outfolder))
self.helper.move_all_content(self.tmp_files["out_gff"],
args_ratt.gff_outfolder, None)
log.write("Remove the temperary files.\n")
shutil.rmtree(self.tmp_files["out_gff"])
shutil.rmtree(self.tmp_files["tar"])
shutil.rmtree(self.tmp_files["ref"])
self.helper.remove_tmp_dir(args_ratt.tar_fastas)
self.helper.remove_tmp_dir(args_ratt.ref_fastas)
self.helper.remove_tmp_dir(args_ratt.ref_embls)
self.helper.remove_tmp_dir(args_ratt.ref_gbk)
def _convert_to_gff(self, ratt_result, args_ratt, files, log):
name = ratt_result.split(".")
filename = ".".join(name[1:-2]) + ".gff"
output_file = os.path.join(args_ratt.output_path, filename)
self.converter.convert_embl2gff(
os.path.join(args_ratt.output_path, ratt_result), output_file)
self.format_fixer.fix_ratt(output_file, ".".join(name[1:-2]),
"tmp_gff")
shutil.move("tmp_gff", output_file)
shutil.copy(output_file, os.path.join(args_ratt.gff_outfolder,
filename))
log.write("\t" + os.path.join(args_ratt.gff_outfolder, filename) +
" is generated.\n")
files.append(filename)
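# Note: _parser_embl_gbk below splits a multi-record GenBank file into one file
# per record. A record starts at its "LOCUS" line (which provides a provisional
# file name), may be renamed using the accession on the "VERSION" line, and ends
# at the "//" terminator, at which point the temporary file is moved to
# <accession>.gbk.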
def _parser_embl_gbk(self, files):
self.helper.check_make_folder(self.gbk)
for file_ in files:
close = False
with open(file_, "r") as f_h:
for line in f_h:
if (line.startswith("LOCUS")):
out = open(self.gbk_tmp, "w")
datas = line.split(" ")
for data in datas:
if (len(data) != 0) and (data != "LOCUS"):
filename = ".".join([data.strip(), "gbk"])
break
elif (line.startswith("VERSION")):
datas = line.split(" ")
for data in datas:
if (len(data) != 0) and (data != "VERSION"):
new_filename = ".".join([data.strip(), "gbk"])
break
# prefer the VERSION-based name when it differs from the LOCUS-based one
if new_filename != filename:
filename = new_filename
if out:
out.write(line)
if line.startswith("//"):
out.close()
close = True
shutil.move(self.gbk_tmp,
os.path.join(self.gbk, filename))
if not close:
out.close()
return self.gbk
def _convert_embl(self, ref_embls, log):
'''convert gbk to embl'''
detect_gbk = False
gbks = []
out_gbk = None
for embl in os.listdir(ref_embls):
if (embl.endswith(".gbk")) or (
embl.endswith(".gbff")) or (
embl.endswith(".gb")):
detect_gbk = True
gbks.append(os.path.join(ref_embls, embl))
if not detect_gbk:
log.write("--related_gbk_files is assigned, but not gbk files are detected.\n"
"The gbk file names need to be ended at .gbk, .gb, or .gbff. \n")
print("Error: Please assign proper Genebank files!")
sys.exit()
elif detect_gbk:
out_gbk = self._parser_embl_gbk(gbks)
log.write("Running converter.py to convert gbk file to embl format.\n")
self.converter.convert_gbk2embl(out_gbk)
self.helper.check_make_folder(self.embl)
self.helper.move_all_content(out_gbk, self.embl, [".embl"])
log.write("\t" + self.embl + " is generated and the embl files are stored in it.\n")
return out_gbk
def _run_ratt(self, args_ratt, tar, ref, out, log):
if (not os.path.exists(self.embl)) or (
not os.path.exists(os.path.join(
self.tmp_files["tar"], tar + ".fa"))) or (
not os.path.exists(os.path.join(
self.tmp_files["ref"], ref + ".fa"))):
print("Error: Please check --compare_pair, the strain names "
"should be the same as the strain names in fasta, "
"genbank or embl files!")
log.write("The strain names in --compare_pair should be the same "
"as the strain names in fasta, genbank, or embl files.\n")
sys.exit()
log.write("Make sure your RATT version is at least 1.64.\n")
log.write("If the RATT can not run properly, please check the "
"RATT_HOME and PAGIT_HOME is assigned correctly.\n")
temp_embl_folder = os.path.join(self.embl, ref)
os.mkdir(temp_embl_folder)
shutil.copy(os.path.join(self.embl, ref + ".embl"), os.path.join(self.embl, ref))
log.write(" ".join([args_ratt.ratt_path, self.embl,
os.path.join(self.tmp_files["tar"], tar + ".fa"),
args_ratt.element, args_ratt.transfer_type,
os.path.join(self.tmp_files["ref"], ref + ".fa")]) + "\n")
call([args_ratt.ratt_path, temp_embl_folder,
os.path.join(self.tmp_files["tar"], tar + ".fa"),
args_ratt.element, args_ratt.transfer_type,
os.path.join(self.tmp_files["ref"], ref + ".fa")],
stdout=out, stderr=DEVNULL)
shutil.rmtree(temp_embl_folder)
# call([args_ratt.ratt_path, self.embl,
# os.path.join(self.tmp_files["tar"], tar + ".fa"),
# args_ratt.element, args_ratt.transfer_type,
# os.path.join(self.tmp_files["ref"], ref + ".fa")],
# stdout=out, stderr=DEVNULL)
log.write("Done!\n")
def _format_and_run(self, args_ratt, log):
print("Running RATT")
for pair in args_ratt.pairs:
ref = pair.split(":")[0]
tar = pair.split(":")[1]
out = open(self.ratt_log, "w+")
self._run_ratt(args_ratt, tar, ref, out, log)
log.write("The following files are generatd:\n")
for filename in os.listdir():
if ("final" in filename):
log.write("\t" + filename + "\n")
shutil.move(filename, os.path.join(args_ratt.output_path,
filename))
elif (args_ratt.element in filename) or (
"query" in filename) or (
"Reference" in filename) or (
"Query" in filename) or (
"Sequences" in filename):
log.write("\t" + filename + "\n")
if os.path.isfile(filename):
os.remove(filename)
if os.path.isdir(filename):
shutil.rmtree(filename)
out.close()
def annotation_transfer(self, args_ratt, log):
self.multiparser.parser_fasta(args_ratt.tar_fastas)
self.multiparser.parser_fasta(args_ratt.ref_fastas)
out_gbk = None
if args_ratt.ref_embls is None:
out_gbk = self._convert_embl(args_ratt.ref_gbk, log)
self._format_and_run(args_ratt, log)
files = []
for data in os.listdir(args_ratt.output_path):
if "final.embl" in data:
log.write("Running converter.py to convert embl "
"files in {0} to gff, ptt, and rnt format.\n".format(data))
self._convert_to_gff(data, args_ratt, files, log)
self._convert_to_pttrnt(args_ratt.gff_outfolder, files, log)
self.helper.check_make_folder(self.tmp_files["out_gff"])
log.write("Merging the output of {0}.\n".format(data))
for folder in os.listdir(args_ratt.tar_fastas):
files = []
if "_folder" in folder:
datas = folder.split("_folder")
prefix = ".".join(datas[0].split(".")[:-1])
for file_ in os.listdir(os.path.join(args_ratt.tar_fastas,
folder)):
files.append(file_[:-3])
for gff in os.listdir(args_ratt.gff_outfolder):
for file_ in files:
if (".gff" in gff) and (file_ == gff[:-4]):
self.helper.merge_file(os.path.join(
args_ratt.gff_outfolder, gff),
self.tmp_files["gff"])
if (".ptt" in gff) and (file_ == gff[:-4]):
self.helper.merge_file(os.path.join(
args_ratt.gff_outfolder, gff),
self.tmp_files["ptt"])
if (".rnt" in gff) and (file_ == gff[:-4]):
self.helper.merge_file(os.path.join(
args_ratt.gff_outfolder, gff),
self.tmp_files["rnt"])
if os.path.exists(self.tmp_files["gff"]):
shutil.move(self.tmp_files["gff"], os.path.join(
self.tmp_files["out_gff"], prefix + ".gff"))
shutil.move(self.tmp_files["ptt"], os.path.join(
self.tmp_files["out_gff"], prefix + ".ptt"))
shutil.move(self.tmp_files["rnt"], os.path.join(
self.tmp_files["out_gff"], prefix + ".rnt"))
else:
print("Error: Please check your fasta or "
"annotation files, they should only contain "
"the query genome. And make sure your RATT can "
"work properly (check $ANNOgesic/output/"
"annotation_transfer/ratt_log.txt).")
log.write("Please check your fasta or "
"annotation files, they should only contain "
"the query genome. And make sure your RATT can "
"work properly (check $ANNOgesic/output/"
"annotation_transfer/ratt_log.txt).\n")
self._remove_files(args_ratt, out_gbk, log)

# [end of file: annogesiclib/ratt.py | package: ANNOgesic]
import os
import csv
import shutil
from annogesiclib.gff3 import Gff3Parser
from annogesiclib.helper import Helper
def read_gff(gff_file, type_):
cdss = []
g_h = open(gff_file)
for entry in Gff3Parser().entries(g_h):
if (Helper().feature_without_notgene(entry)):
if (type_ == "riboswitch") and (entry.feature != "riboswitch"):
cdss.append(entry)
elif (type_ == "thermometer") and (
entry.feature != "RNA_thermometer"):
cdss.append(entry)
cdss = sorted(cdss, key=lambda k: (k.seq_id, k.start, k.end, k.strand))
g_h.close()
return cdss
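# Note: check_repeat below tests whether a riboswitch/RNA thermometer candidate
# overlaps a given region on the same strain and strand. The candidate is first
# shrunk by "fuzzy" nucleotides on both sides; the four conditions then cover
# complete containment in either direction as well as partial overlap at either
# end of the region.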
def check_repeat(tab, strain, strand, start, end, fuzzy):
start = start + fuzzy
end = end - fuzzy
if (tab["strain"] == strain) and (
tab["strand"] == strand):
if ((tab["start"] <= start) and (
tab["end"] >= end)) or (
(tab["start"] >= start) and (
tab["end"] <= end)) or (
(tab["start"] <= start) and (
tab["end"] <= end) and (
tab["end"] >= start)) or (
(tab["start"] >= start) and (
tab["start"] <= end) and (
tab["end"] >= end)):
return True
return False
def rbs_overlap(table_file, gff_file, type_, fuzzy):
tmp_tab = table_file + "_tmp"
cdss = read_gff(gff_file, type_)
out = open(tmp_tab, "w")
fh = open(table_file, "r")
tables = []
for row in csv.reader(fh, delimiter='\t'):
if not row[0].startswith("#"):
tables.append({"strain": row[1], "strand": row[2],
"start": int(row[4]), "end": int(row[5]),
"info": "\t".join(row)})
fh.close()
for tab in tables:
overlap = False
for cds in cdss:
overlap = check_repeat(tab, cds.seq_id, cds.strand,
cds.start, cds.end, fuzzy)
if overlap:
break
for com in tables:
if tab != com:
repeat = check_repeat(tab, com["strain"], com["strand"],
com["start"], com["end"], 0)
if (not overlap):
if ((repeat) and (
"print" not in tab.keys()) and (
"print" not in com.keys())) or (
not repeat):
overlap = False
else:
overlap = True
if not overlap:
tab["print"] = True
out.write(tab["info"] + "\n")
out.close()
os.remove(table_file)
shutil.move(tmp_tab, table_file)

# [end of file: annogesiclib/rbs_overlap.py | package: ANNOgesic]
from annogesiclib.gff3 import Gff3Parser
def read_file(filename):
datas = []
f_h = open(filename, "r")
for entry in Gff3Parser().entries(f_h):
datas.append(entry)
datas = sorted(datas, key=lambda k: (k.seq_id, k.start, k.end, k.strand))
f_h.close()
return datas
def del_attributes(entry):
'''delete the Parent attribute of the features which are not genome
annotation features (CDS, tRNA, rRNA, exon)'''
if (entry.feature == "CDS") or (
entry.feature == "tRNA") or (
entry.feature == "rRNA") or (
entry.feature == "exon"):
pass
else:
if "Parent" in entry.attributes.keys():
del entry.attributes["Parent"]
def print_file(entry, tran, out):
if "Parent" in entry.attributes.keys():
if str(tran.attributes["ID"]) not in entry.attributes["Parent"]:
entry.attributes["Parent"] = ",".join([
entry.attributes["Parent"], str(tran.attributes["ID"])])
else:
entry.attributes["Parent"] = str(tran.attributes["ID"])
attributes = {}
for key, value in entry.attributes.items():
if (key != "print") and (key != "parent_tran"):
attributes[key] = value
attribute_string = ";".join(
["=".join(items) for items in attributes.items()])
out.write("".join([entry.info_without_attributes, "\t",
attribute_string, "\n"]))
entry.attributes["print"] = True
def compare_tran(datas, tran, out):
'''compare transcripts with 5'UTRs, 3'UTRs and genes/CDSs for merging'''
for data in datas:
del_attributes(data)
if (data.seq_id == tran.seq_id) and (
data.strand == tran.strand):
if (data.start >= tran.start) and (
data.end <= tran.end):
print_file(data, tran, out)
def print_rest(datas, out):
'''print the remaining data which are not related to operons'''
for data in datas:
if "print" not in data.attributes.keys():
out.write(data.info + "\n")
def compare_tran_term(term, tran, out, fuzzy_term):
'''compare transcript and terminator for merging'''
if (term.seq_id == tran.seq_id) and (
term.strand == tran.strand):
if (term.start >= tran.start) and (
term.end <= tran.end):
print_file(term, tran, out)
else:
if term.strand == "+":
if ((term.start - fuzzy_term) <= tran.end) and (
term.end + fuzzy_term >= tran.end):
print_file(term, tran, out)
elif (term.start <= tran.end) and (
term.end >= tran.end):
print_file(term, tran, out)
else:
if (term.end + fuzzy_term >= tran.start) and (
term.start - fuzzy_term <= tran.start):
print_file(term, tran, out)
elif (term.start <= tran.start) and (
term.end >= tran.start):
print_file(term, tran, out)
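# Note: combine_gff below writes each transcript and then attaches, as children,
# the TSSs, UTRs, annotation features and terminators that fall within it.
# fuzzy_tss allows a TSS to sit slightly outside the transcript boundary on the
# 5' side, and fuzzy_term (handled by compare_tran_term above) allows a
# terminator to extend slightly beyond the transcript end.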
def combine_gff(gff_file, ta_file, tss_file, utr5_file, utr3_file,
term_file, fuzzy_tss, fuzzy_term, out_file):
'''combine the features which are related to operons to
form an operon gff file'''
gffs = read_file(gff_file)
trans = read_file(ta_file)
if tss_file is not None:
tsss = read_file(tss_file)
if utr5_file is not None:
utr5s = read_file(utr5_file)
if utr3_file is not None:
utr3s = read_file(utr3_file)
out = open(out_file, "w")
out.write("##gff-version 3\n")
if term_file is not None:
terms = read_file(term_file)
for tran in trans:
out.write(tran.info + "\n")
if tss_file is not None:
for tss in tsss:
del_attributes(tss)
if (tss.seq_id == tran.seq_id) and (tss.strand == tran.strand):
if tss.strand == "+":
if ((tss.start + fuzzy_tss) >= tran.start) and (
tss.start <= tran.end):
print_file(tss, tran, out)
else:
if (tss.start >= tran.start) and (
tss.end - fuzzy_tss <= tran.end):
print_file(tss, tran, out)
if utr5_file is not None:
compare_tran(utr5s, tran, out)
compare_tran(gffs, tran, out)
if utr3_file is not None:
compare_tran(utr3s, tran, out)
if term_file is not None:
for term in terms:
del_attributes(term)
compare_tran_term(term, tran, out, fuzzy_term)
if tss_file is not None:
print_rest(tsss, out)
if utr5_file is not None:
print_rest(utr5s, out)
print_rest(gffs, out)
if utr3_file is not None:
print_rest(utr3s, out)
if term_file is not None:
print_rest(terms, out)
out.close()

# [end of file: annogesiclib/combine_gff.py | package: ANNOgesic]
import os
import sys
import shutil
from glob import glob
from annogesiclib.multiparser import Multiparser
from annogesiclib.helper import Helper
from contextlib import redirect_stdout
class ArgsContainer(object):
def __init__(self):
self.multiparser = Multiparser()
self.helper = Helper()
def _check_strain_length(self, strain_lens, flag):
lengths = {}
for m_l in strain_lens:
if ":" not in m_l:
print("Error: The assignment of {0} needs to contain "
"genome names and their length "
"of checked region!".format(flag))
sys.exit()
else:
if m_l.split(":")[-1] == "all":
lengths[m_l.split(":")[0]] = m_l.split(":")[-1]
else:
lengths[m_l.split(":")[0]] = int(m_l.split(":")[-1])
return lengths
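# Note: as parsed above, each entry of the genome-length argument is
# "<genome name>:<length>", where "all" is a valid length meaning the whole
# genome; hypothetical examples would be "NC_007795.1:all" or "plasmid01:15000".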
def _check_track_name(self, lib, tracks, strand):
if lib.split("/")[-1] in tracks["file"]:
print("Error: {0} of the wiggle files is repeated!".format(
lib.split("/")[-1]))
sys.exit()
else:
tracks["file"].append(lib.split("/")[-1])
with open(lib) as fh:
for line in fh:
line = line.strip()
if line.startswith("track"):
track_name = line.split(" ")[-1]
if track_name in tracks["track"][strand]:
print("Error: {0} of the tracks in the "
"wiggle files is repeated!".format(track_name))
sys.exit()
else:
tracks["track"][strand].append(track_name)
def _create_working_wigs(self, out_folder, libs, wig_folder, tracks):
new_libs = []
if libs is not None:
self.helper.check_make_folder(wig_folder)
for lib in libs:
if not os.path.exists(lib.split(":")[0]):
print("Error: {0} of the wiggle files is not found!".format(
lib.split(":")[0]))
sys.exit()
self._check_track_name(lib.split(":")[0], tracks,
lib.split(":")[-1])
shutil.copy(lib.split(":")[0], wig_folder)
wig = lib.split(":")[0].split("/")[-1]
new_libs.append(":".join([wig, ":".join(lib.split(":")[1:])]))
else:
new_libs = None
return new_libs
def _check_replicates(self, replicates_tex, replicates_frag,
tex_lib, frag_lib):
'''Check the replicate of frag and tex libs'''
if (tex_lib is not None) and (replicates_tex is None):
print("Error: No replicates numbers for "
"TEX treated libraries are assigned!")
sys.exit()
if (frag_lib is not None) and (replicates_frag is None):
print("Error: No replicates numbers for "
"fragmented libraries are assigned!")
sys.exit()
if (replicates_tex is not None) and (replicates_frag is not None):
replicates = {"tex": replicates_tex,
"frag": replicates_frag}
elif replicates_tex is not None:
replicates = {"tex": replicates_tex, "frag": -1}
elif replicates_frag is not None:
replicates = {"tex": -1, "frag": replicates_frag}
else:
print("Error: No replicates number was assigned!")
sys.exit()
if replicates["tex"] != -1:
for rep in replicates["tex"]:
if ("_" not in rep):
print("Error: Please check the input format of replicate_tex! "
"It should also contain condition name.")
sys.exit()
if replicates["frag"] != -1:
for rep in replicates["frag"]:
if ("_" not in rep):
print("Error: Please check the input format of replicate_frag! "
"It should also contain condition name.")
sys.exit()
return replicates
def _check_assign_info(self, infos, file_type, wig_type):
if file_type == "cond":
index = 1
elif file_type == "rep":
index = 97
for info, num in sorted(infos.items()):
if (file_type == "cond") or (file_type == "rep"):
if file_type == "cond":
if info != index:
print("Error: The condition number and order "
"of --tex_notex_libs should follow 1, 2, 3.")
sys.exit()
elif file_type == "rep":
if ord(info) != index:
print("Error: The replicate index and order "
"of --tex_notex_libs should follow a, b, c.")
sys.exit()
if (wig_type == "tex") and (num % 4 != 0):
print("Error: The --tex_notex_libs was assinged incorrectly. "
"Please check it again.")
sys.exit()
elif (wig_type == "frag") and (num % 2 != 0):
print("Error: The --frag_libs was assinged incorrectly. "
"Please check it again.")
sys.exit()
index += 1
elif file_type == "strand":
if wig_type == "frag":
if (infos["+"] != infos["-"]):
print("Error: The --frag_libs was assinged incorrectly. "
"Please check it again.")
sys.exit()
if wig_type == "tex":
if (num % 2 != 0) and (infos["+"] != infos["-"]):
print("Error: The --tex_notex_libs was assinged incorrectly. "
"Please check it again.")
sys.exit()
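# Note: as checked below, each library string has five colon-separated fields:
# <wig file>:<tex|notex|frag>:<condition number>:<replicate letter>:<strand>,
# e.g. a hypothetical "sample_TEX_forward.wig:tex:1:a:+". Conditions must be
# numbered 1, 2, 3, ... and replicates lettered a, b, c, ...; a TEX/notex
# condition needs tracks in multiples of four (tex and notex on both strands)
# and a fragmented condition in multiples of two (both strands).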
def _check_tex_frag(self, libs, wig_type):
conds = {}
reps = {}
strands = {}
for lib in libs:
datas = lib.split(":")
if not datas[0].endswith(".wig"):
print("Error: {0} should end with .wig!".format(datas[0]))
sys.exit()
if (datas[1] != "notex") and (
datas[1] != "tex") and (
datas[1] != "frag"):
print("Error: Please assign \"tex\", \"notex\" or "
"\"frag\" to your input libraries.")
sys.exit()
try:
if int(datas[2]) not in conds.keys():
conds[int(datas[2])] = 0
conds[int(datas[2])] += 1
except ValueError:
print("Error: Condition of libs should be assigned by integers!")
sys.exit()
if datas[3] not in reps.keys():
reps[datas[3]] = 0
reps[datas[3]] += 1
datas[4] = datas[4].strip()
if (datas[4] != "+") and (datas[4] != "-"):
print("Error: Strand of libs should be assigned as + or -")
sys.exit()
if datas[4] not in strands.keys():
strands[datas[4]] = 0
strands[datas[4]] += 1
self._check_assign_info(conds, "cond", wig_type)
self._check_assign_info(reps, "rep", wig_type)
self._check_assign_info(strands, "strand", wig_type)
def _check_libs(self, tex_notex_libs, frag_libs):
'''Check the libs of frag and tex'''
if (tex_notex_libs is None) and (frag_libs is None):
print("Error: No libraries assigned!!")
sys.exit()
elif (tex_notex_libs is not None) and (frag_libs is not None):
libs = tex_notex_libs + frag_libs
self._check_tex_frag(tex_notex_libs, "tex")
self._check_tex_frag(frag_libs, "frag")
elif (tex_notex_libs is not None):
libs = tex_notex_libs
self._check_tex_frag(tex_notex_libs, "tex")
elif (frag_libs is not None):
libs = frag_libs
self._check_tex_frag(frag_libs, "frag")
return libs
def _check_condition_num(self, out_prefix, libs):
high = 0
for lib in libs:
datas = lib.split(":")
if int(datas[2]) > high:
high = int(datas[2])
if len(out_prefix) != high:
print("Error: The number of --condition_names should be "
"the same to the condition of input libraries!")
sys.exit()
def _combine_files(self, ref_files, out_folder, filename):
if ref_files is not None:
tar_file = os.path.join(out_folder, filename)
if os.path.exists(tar_file):
os.remove(tar_file)
for files in ref_files:
for file_ in glob(files):
self.helper.merge_file(file_, tar_file)
return tar_file
else:
return None
def _merge_by_strain(self, wig_path, libs):
strains = []
merge_folder = os.path.join(wig_path, "merge_tmp")
self.helper.check_make_folder(merge_folder)
for wig in os.listdir(wig_path):
if "_STRAIN_" in wig:
strain = wig.split("_STRAIN_")[-1].replace(".wig", "")
if strain not in strains:
strains.append(strain)
for strain in strains:
change_f = False
change_r = False
for wig in os.listdir(wig_path):
filename = wig.split("_STRAIN_")
if ("_STRAIN_" in wig) and (
filename[-1].replace(
".wig", "") == strain):
for lib in libs:
if (filename[0] in lib) and (lib[-1] == "+"):
self.helper.merge_file(
os.path.join(wig_path, wig),
os.path.join(merge_folder,
"tmp_forward.wig"))
change_f = True
elif (filename[0] in lib) and (lib[-1] == "-"):
self.helper.merge_file(
os.path.join(wig_path, wig),
os.path.join(merge_folder,
"tmp_reverse.wig"))
change_r = True
if change_f and change_r:
change_f = False
change_r = False
shutil.move(os.path.join(merge_folder, "tmp_forward.wig"),
os.path.join(merge_folder,
strain + "_forward.wig"))
shutil.move(os.path.join(merge_folder, "tmp_reverse.wig"),
os.path.join(merge_folder,
strain + "_reverse.wig"))
else:
print("Error: comparing input files of {0} failed. "
"Please check the seq IDs of all gff and fasta "
"files, they should be the same.\nPlease also "
"check the wiggle files which should contain "
"forward and reverse files.".format(strain))
sys.exit()
self.helper.remove_all_content(wig_path, ".wig", "file")
self.helper.move_all_content(merge_folder, wig_path, None)
shutil.rmtree(merge_folder)
def _parser_combine_wigs(self, subcommand):
'''Check the wig folders of frag and tex, then merge them'''
self.tex_path = None
self.frag_path = None
if subcommand == "transcript":
if self.gffs is not None:
self.multiparser.parser_gff(self.gffs, None)
gff_path = self.gffs
elif subcommand == "terminator":
self.multiparser.parser_gff(self.gffs, None)
gff_path = os.path.join(self.gffs, "tmp")
tmp_file = os.path.join(self.out_folder, "tmp.txt")
with open(tmp_file, 'w') as fh:
with redirect_stdout(fh):
self.multiparser.parser_gff(gff_path, None)
os.remove(tmp_file)
else:
self.multiparser.parser_gff(self.gffs, None)
gff_path = self.gffs
if self.tex_wigs is not None:
self.tex_path = os.path.join(self.tex_wigs, "tmp")
self.multiparser.parser_wig(self.tex_wigs)
if self.gffs is not None:
self.multiparser.combine_wig(gff_path, self.tex_path,
None, self.libs)
else:
self._merge_by_strain(self.tex_path, self.libs)
self.merge_wigs = self.tex_wigs
self.wig_path = self.tex_path
if self.frag_wigs is not None:
self.frag_path = os.path.join(self.frag_wigs, "tmp")
self.multiparser.parser_wig(self.frag_wigs)
if self.gffs is not None:
self.multiparser.combine_wig(gff_path, self.frag_path,
None, self.libs)
else:
self._merge_by_strain(self.frag_path, self.libs)
self.merge_wigs = self.frag_wigs
self.wig_path = self.frag_path
if (self.tex_path is not None) and (
self.frag_path is not None):
self = self._merge_wig()
if (self.tex_path is None) and (
self.frag_path is None):
print("Error: There is no proper wig files assigned!!")
sys.exit()
return self
def _merge_wig(self):
'''Copy the wig files to one folder'''
self.merge_wigs = os.path.join(self.out_folder, "merge_wigs")
if (self.tex_wigs is not None) and (
self.frag_wigs is not None):
self.helper.check_make_folder(self.merge_wigs)
self.wig_path = os.path.join(self.merge_wigs, "tmp")
self.helper.check_make_folder(self.wig_path)
for wig in os.listdir(self.tex_wigs):
if os.path.isfile(os.path.join(self.tex_wigs, wig)):
shutil.copy(os.path.join(self.tex_wigs, wig),
self.merge_wigs)
for wig in os.listdir(self.frag_wigs):
if os.path.isfile(os.path.join(self.frag_wigs, wig)):
shutil.copy(os.path.join(self.frag_wigs, wig),
self.merge_wigs)
for wig in os.listdir(self.tex_path):
if os.path.isfile(os.path.join(self.tex_path, wig)):
shutil.copy(os.path.join(self.tex_path, wig),
self.wig_path)
for wig in os.listdir(self.frag_path):
if os.path.isfile(os.path.join(self.frag_path, wig)):
self.helper.merge_file(os.path.join(self.frag_path, wig),
os.path.join(self.wig_path, wig))
elif (self.tex_wigs is not None):
self.merge_wigs = self.tex_wigs
elif (self.frag_wigs is not None):
self.merge_wigs = self.frag_wigs
return self
def _deal_multi_inputs(self, inputs, file_type, num, command):
'''Split the input if multiple values are assigned'''
if inputs is not None:
datas = inputs.split(",")
if num is not None:
if (len(datas) != num):
print("Error: the amount of {0} is not correct!!".format(
command))
new_inputs = []
for data in datas:
if file_type == "float":
new_inputs.append(float(data.strip()))
elif file_type == "int":
new_inputs.append(int(data.strip()))
else:
new_inputs.append(data)
return new_inputs
else:
return inputs
def _gen_copy_new_folder(self, file_types, out_folder,
folder_name, ref_files, flag):
if ref_files is not None:
new_ref_folder = os.path.join(out_folder, folder_name)
self.helper.check_make_folder(new_ref_folder)
for files in ref_files:
detect = False
for file_ in glob(files):
for type_ in file_types:
if file_.endswith(type_):
detect = True
if not detect:
print("Error: {0} doesn't exist or "
"the {0} is/are not ended with {1}!".format(
files, " ".join(file_types)))
sys.exit()
shutil.copy(file_, new_ref_folder)
if (file_.endswith(".gff3")):
new_name = os.path.basename(file_)[:-1]
shutil.move(os.path.join(new_ref_folder, os.path.basename(file_)),
os.path.join(new_ref_folder, new_name))
return new_ref_folder
else:
return None
def _check_tss_parameter_setting(
self, auto_load, genome_order, height, height_reduction, factor,
factor_reduction, base_height, enrichment_factor,
processing_factor):
if auto_load:
if not os.path.exists(auto_load):
print("Error: {0} is not found. Please assign proper folder to "
"--auto_load_optimized_parameters!".format(auto_load))
sys.exit()
else:
para_lists = [height, height_reduction, factor, factor_reduction,
base_height, enrichment_factor, processing_factor]
para_names = ["--height", "--height_reduction", "--factor",
"--factor_reduction", "--base_height",
"--enrichment_factor", "--processing_factor"]
if (genome_order is not None):
for para_list, para_name in zip(para_lists, para_names):
if len(genome_order) != len(para_list):
print("Error: --genome_order and {0} have different number of "
"of genomes!".format(para_name))
sys.exit()
else:
for para_list, para_name in zip(para_lists, para_names):
if len(para_list) != 1:
print("Error: --genome_order is default (using one "
"parameter set to all genomes) but {0} has more "
"than 1 input values!".format(para_name))
sys.exit()
def container_ratt(self, ratt_path, element, transfer_type,
ref_embl, ref_gbk, target_fasta, ref_fasta, ratt_folder,
tar_annotation_folder, compare_pair):
self.ratt_path = ratt_path
self.element = element
self.transfer_type = transfer_type
self.ref_embls = self._gen_copy_new_folder(
[".embl"], ratt_folder, "temp_embl",
ref_embl, ["--ref_embl_files"])
self.ref_gbk = self._gen_copy_new_folder(
[".gbk", ".gbff", ".gb"], ratt_folder, "temp_gbk",
ref_gbk, ["--ref_gbk_files"])
file_types = [".fa", ".fna", ".fasta"]
self.tar_fastas = self._gen_copy_new_folder(
file_types, ratt_folder, "temp_tar", target_fasta,
["--ref_fasta_files"])
self.ref_fastas = self._gen_copy_new_folder(
file_types, ratt_folder, "temp_ref", ref_fasta,
["--target_fasta_files"])
self.output_path = ratt_folder
self.gff_outfolder = tar_annotation_folder
self.pairs = compare_pair
return self
def container_tsspredator(self, TSSpredator_path, compute_program,
fasta_files, annotation_files, lib,
output_prefix, output_id, auto_load, genome_order,
height, height_reduction, factor, factor_reduction,
base_height, enrichment_factor,
processing_factor, replicate_match, out_folder,
validate_gene, merge_manual, strain_lengths,
compare_transcript_assembly, fuzzy, utr_length,
cluster, re_check_orphan, overlap_feature,
overlap_gff, remove_low_expression):
if strain_lengths is not None:
nt_lengths = self._check_strain_length(
strain_lengths, "--genome_lengths")
self.strain_lengths = nt_lengths
else:
self.strain_lengths = strain_lengths
if merge_manual is not None:
self.strain_lengths = {"all": "all"}
self.tsspredator_path = TSSpredator_path
self.program = compute_program
self._check_tss_parameter_setting(
auto_load, genome_order, height, height_reduction, factor,
factor_reduction, base_height, enrichment_factor, processing_factor)
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], out_folder, "tmp_fasta", fasta_files,
["--fasta_files"])
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.wig_folder = os.path.join(out_folder, "tmp_wig")
self.helper.check_make_folder(self.wig_folder)
tracks = {"file": [], "track": {"+": [], "-": []}}
self.libs = self._create_working_wigs(out_folder, lib,
self.wig_folder, tracks)
self.libs = self._check_libs(self.libs, None)
self._check_condition_num(output_prefix, self.libs)
self.output_prefixs = output_prefix
self.output_id = output_id
self.auto_load = auto_load
self.genome_order = genome_order
self.height = height
self.height_reduction = height_reduction
self.factor = factor
self.factor_reduction = factor_reduction
self.base_height = base_height
self.enrichment_factor = enrichment_factor
self.processing_factor = processing_factor
self.repmatch = replicate_match
self.out_folder = out_folder
self.validate = validate_gene
        self.manual = self._gen_copy_new_folder(
            [".gff", ".gff3"], out_folder, "tmp_manual", merge_manual,
            ["--manual_files"])
self.ta_files = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_ta", compare_transcript_assembly,
["--compare_transcript_assembly"])
self.fuzzy = fuzzy
self.utr_length = utr_length
self.cluster = cluster
self.check_orphan = re_check_orphan
self.overlap_feature = overlap_feature
self.overlap_gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_reference", overlap_gff,
["--compare_overlap_gff"])
self.remove_low_expression = remove_low_expression
return self
def container_optimize(self, TSSpredator_path, fasta_file, annotation_file,
manual, out_folder, max_height,
max_height_reduction, max_factor,
max_factor_reduction, max_base_height,
max_enrichment_factor, max_processing_factor,
utr_length, lib, output_prefix, output_id,
cluster, strain_lengths, core, program,
replicate_match, steps):
self.tsspredator_path = TSSpredator_path
if strain_lengths is not None:
nt_lengths = self._check_strain_length(
strain_lengths, "--genome_lengths")
self.strain_lengths = nt_lengths
else:
self.strain_lengths = strain_lengths
if manual is not None:
self.strain_lengths = {"all": "all"}
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], out_folder, "tmp_fasta",
fasta_file, ["--fasta_files"])
self.manuals = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_manual",
manual, ["--manual_files"])
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_anno",
annotation_file, ["--annotation_files"])
self.wigs = os.path.join(out_folder, "tmp_wig")
self.helper.check_make_folder(self.wigs)
tracks = {"file": [], "track": {"+": [], "-": []}}
self.libs = self._create_working_wigs(out_folder, lib,
self.wigs, tracks)
self.libs = self._check_libs(self.libs, None)
self.output_folder = out_folder
self.height = max_height
self.height_reduction = max_height_reduction
self.factor = max_factor
self.factor_reduction = max_factor_reduction
self.base_height = max_base_height
self.enrichment = max_enrichment_factor
self.processing = max_processing_factor
self.utr = utr_length
self._check_condition_num(output_prefix, self.libs)
self.replicate_name = output_prefix
self.output_id = output_id
self.cluster = cluster
self.cores = core
self.program = program
self.replicate = replicate_match
self.steps = steps
return self
def _create_wig_folder(self, folder, libs):
if libs is not None:
self.helper.check_make_folder(folder)
return folder
else:
return None
def container_terminator(
self, TransTermHP_path, expterm_path, RNAfold_path, out_folder,
fasta_files, annotation_files, transcript_files, srna,
decrease, highest_coverage, fuzzy_detect_coverage,
fuzzy_within_transcript, fuzzy_downstream_transcript,
fuzzy_within_gene, fuzzy_downstream_gene, transtermhp_folder,
tex_notex_libs, frag_libs, tex_notex, replicates_tex,
replicates_frag, min_loop_length, max_loop_length,
min_stem_length, max_stem_length, min_AT_tail, miss_rate,
mut_u, keep_multi, window, shift):
self.TransTermHP_path = TransTermHP_path
self.expterm_path = expterm_path
self.RNAfold_path = RNAfold_path
self.out_folder = out_folder
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], out_folder, "tmp_fasta", fasta_files,
["--fasta_files"])
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_ta", transcript_files,
["--transcript_files"])
        self.srnas = self._gen_copy_new_folder(
            [".gff", ".gff3"], out_folder, "tmp_srna", srna, ["--srna_files"])
self.helper.check_make_folder(os.path.join(out_folder, "tmp_wig"))
self.tex_wigs = self._create_wig_folder(
os.path.join(out_folder, "tmp_wig", "tex_notex"),
tex_notex_libs)
self.frag_wigs = self._create_wig_folder(
os.path.join(out_folder, "tmp_wig", "frag"), frag_libs)
self.decrease = decrease
self.cutoff_coverage = highest_coverage
self.fuzzy = fuzzy_detect_coverage
self.fuzzy_up_ta = fuzzy_within_transcript
self.fuzzy_down_ta = fuzzy_downstream_transcript
self.fuzzy_up_gene = fuzzy_within_gene
self.fuzzy_down_gene = fuzzy_downstream_gene
self.hp_folder = transtermhp_folder
tracks = {"file": [], "track": {"+": [], "-": []}}
self.tlibs = self._create_working_wigs(
out_folder, tex_notex_libs, self.tex_wigs, tracks)
self.flibs = self._create_working_wigs(
out_folder, frag_libs, self.frag_wigs, tracks)
self.libs = self._check_libs(self.tlibs, self.flibs)
self.tex_notex = tex_notex
self.replicates_tex = replicates_tex
self.replicates_frag = replicates_frag
self.replicates = self._check_replicates(
replicates_tex, replicates_frag, tex_notex_libs, frag_libs)
self.min_loop = min_loop_length
self.max_loop = max_loop_length
self.min_stem = min_stem_length
self.max_stem = max_stem_length
self.at_tail = min_AT_tail
self.miss_rate = miss_rate
self.mut_u = mut_u
self.keep_multi = keep_multi
self.window = window
self.shift = shift
self = self._parser_combine_wigs("terminator")
return self
def container_transcript(self, tex_notex, modifys, length, annotation_files,
height, width, tolerance, tolerance_coverage,
replicates_tex, replicates_frag, out_folder,
tss_files, TSS_fuzzy, tex_treated_libs,
fragmented_libs, compare_feature_genome,
terminator_files, fuzzy_term, max_dist):
if (compare_feature_genome is not None) and (annotation_files is None):
print("Error: --annotation_files needs to be assigned if "
"--compare_feature_genome is assigned.")
sys.exit()
for modify in modifys:
if (modify != "merge_overlap") and (
modify != "extend_5end") and (
modify != "extend_3end") and (
modify != "within_extend_ends") and (
modify != "none"):
print("Error: --modify_transcript need to be assign as "
"\"merge_overlap\", \"extend_5end\", \"extend_3end\", "
"\"within_extend_ends\" or \"none\". "
"The assignment is wrong!")
sys.exit()
self.modify = modifys
self.helper.check_make_folder(os.path.join(out_folder, "tmp_wig"))
self.tex_wigs = self._create_wig_folder(
os.path.join(out_folder, "tmp_wig", "tex_notex"),
tex_treated_libs)
self.frag_wigs = self._create_wig_folder(
os.path.join(out_folder, "tmp_wig", "frag"), fragmented_libs)
self.tex = tex_notex
self.length = length
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.height = height
self.width = width
self.tolerance = tolerance
self.low_cutoff = tolerance_coverage
self.replicates_tex = replicates_tex
self.replicates_frag = replicates_frag
self.replicates = self._check_replicates(
replicates_tex, replicates_frag,
tex_treated_libs, fragmented_libs)
self.out_folder = out_folder
self.compare_tss = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_tss", tss_files, ["--tss_files"])
self.fuzzy = TSS_fuzzy
tracks = {"file": [], "track": {"+": [], "-": []}}
self.tlibs = self._create_working_wigs(
out_folder, tex_treated_libs, self.tex_wigs, tracks)
self.flibs = self._create_working_wigs(
out_folder, fragmented_libs, self.frag_wigs, tracks)
self.libs = self._check_libs(self.tlibs, self.flibs)
self.c_feature = compare_feature_genome
self.terms = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_term", terminator_files,
["--terminator_files"])
self.fuzzy_term = fuzzy_term
self.max_dist = max_dist
self = self._parser_combine_wigs("transcript")
return self
def container_utr(self, tss_files, annotation_files,
transcript_assembly_files, terminator_files,
terminator_fuzzy, utr_folder, tss_source, base_5utr,
length, base_3utr):
self.tsss = self._gen_copy_new_folder(
[".gff", ".gff3"], utr_folder, "tmp_tss", tss_files, ["--tss_files"])
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], utr_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], utr_folder, "tmp_ta", transcript_assembly_files,
["--transcript_files"])
self.terms = self._gen_copy_new_folder(
[".gff", ".gff3"], utr_folder, "tmp_term", terminator_files,
["--terminator_files"])
self.fuzzy = terminator_fuzzy
self.out_folder = utr_folder
self.source = tss_source
self.base_5utr = base_5utr
self.base_3utr = base_3utr
self.length = length
return self
def container_srna(self, rnafold, relplot_pl, mountain_pl, blastn, blastx,
blastdb, srna_folder, UTR_derived_sRNA,
annotation_files, TSS_files, transcript_files,
TSS_intergenic_fuzzy, TSS_5UTR_fuzzy, TSS_3UTR_fuzzy,
TSS_interCDS_fuzzy, import_info, processing_site_files,
fasta_files, mountain_plot, nr_format, srna_format,
sRNA_database_path, nr_database_path, cutoff_energy,
para_blast, blast_score_s, blast_score_n,
run_intergenic_TEX_coverage,
run_intergenic_noTEX_coverage,
run_intergenic_fragmented_coverage, break_tran,
run_antisense_TEX_coverage,
run_antisense_noTEX_coverage,
run_antisense_fragmented_coverage, run_utr_TEX_coverage,
run_utr_noTEX_coverage, run_utr_fragmented_coverage,
max_length, min_length, tex_notex_libs, frag_libs,
replicates_tex, replicates_frag, tex_notex, blast_e_nr,
blast_e_srna, detect_sRNA_in_CDS,
decrease_intergenic, decrease_utr, fuzzy_intergenic,
fuzzy_utr, cutoff_nr_hit, sORF, overlap_percent_CDS,
terminator_files, terminator_fuzzy_in_sRNA,
terminator_fuzzy_out_sRNA, ignore_hypothetical_protein,
TSS_source, min_utr_coverage, promoter_tables,
ranking_promoter, promoter_name, compute_sec_str,
len_u, num_u, mut_u, ex_srna):
self.rnafold = rnafold
self.ex_srna = ex_srna
self.compute_sec_str = compute_sec_str
self.para_blast = para_blast
self.relplot_pl = relplot_pl
self.mountain_pl = mountain_pl
self.blastx = blastx
self.blastn = blastn
self.blastdb = blastdb
self.out_folder = srna_folder
self.utr_srna = UTR_derived_sRNA
self.len_u = len_u
self.num_u = num_u
self.mut_u = mut_u
self.blast_score_s = blast_score_s
if (promoter_tables is not None) and (promoter_name is None):
print("Error: No promoter names are assigned!\n")
sys.exit()
if blast_score_n is None:
self.blast_score_n = 0
else:
self.blast_score_n = blast_score_n
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], srna_folder, "temp_anno", annotation_files,
["--annotation_files"])
self.tss_folder = self._gen_copy_new_folder(
[".gff", ".gff3"], srna_folder, "temp_tss", TSS_files, ["--tss_files"])
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], srna_folder, "temp_ta", transcript_files,
["--transcript_files"])
self.fuzzy_inter_tss = TSS_intergenic_fuzzy
self.fuzzy_5utr_tss = TSS_5UTR_fuzzy
self.fuzzy_3utr_tss = TSS_3UTR_fuzzy
self.fuzzy_intercds_tss = TSS_interCDS_fuzzy
self.fuzzy_tsss = {"5utr": self.fuzzy_5utr_tss,
"3utr": self.fuzzy_3utr_tss,
"interCDS": self.fuzzy_intercds_tss,
"inter": self.fuzzy_inter_tss}
self.import_info = import_info
self.helper.check_make_folder(os.path.join(srna_folder, "temp_wig"))
self.tex_wigs = self._create_wig_folder(
os.path.join(srna_folder, "temp_wig", "tex_notex"),
tex_notex_libs)
self.frag_wigs = self._create_wig_folder(
os.path.join(srna_folder, "temp_wig", "frag"), frag_libs)
self.pro_folder = self._gen_copy_new_folder(
[".gff", ".gff3"], srna_folder, "temp_pro", processing_site_files,
["--processing_site_files"])
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], srna_folder,
"temp_fasta", fasta_files, ["--fasta_files"])
self.mountain = mountain_plot
self.nr_format = nr_format
self.srna_format = srna_format
self.srna_database = sRNA_database_path
self.nr_database = nr_database_path
self.energy = cutoff_energy
self.coverage_tex = self._deal_multi_inputs(
run_intergenic_TEX_coverage, "float", 5,
"--run_intergenic_TEX_coverage")
self.coverage_notex = self._deal_multi_inputs(
run_intergenic_noTEX_coverage, "float", 5,
"--run_intergenic_noTEX_coverage")
self.coverage_frag = self._deal_multi_inputs(
run_intergenic_fragmented_coverage, "float", 5,
"--run_intergenic_fragmented_coverage")
self.anti_cover_tex = self._deal_multi_inputs(
run_antisense_TEX_coverage, "float", 5,
"--run_antisense_TEX_coverage")
self.anti_cover_notex = self._deal_multi_inputs(
run_antisense_noTEX_coverage, "float", 5,
"--run_antisense_noTEX_coverage")
self.anti_cover_frag = self._deal_multi_inputs(
run_antisense_fragmented_coverage, "float", 5,
"--run_antisense_fragmented_coverage")
self.break_tran = self._deal_multi_inputs(
break_tran, "float", 3,
"--run_break_transcript")
self.utr_tex_cover = self._deal_multi_inputs(
run_utr_TEX_coverage, "str", 3, "--run_utr_TEX_coverage")
        self.utr_notex_cover = self._deal_multi_inputs(
            run_utr_noTEX_coverage, "str", 3, "--run_utr_noTEX_coverage")
self.utr_frag_cover = self._deal_multi_inputs(
run_utr_fragmented_coverage, "str", 3,
"--run_utr_fragmented_coverage")
self.max_len = max_length
self.min_len = min_length
tracks = {"file": [], "track": {"+": [], "-": []}}
self.tlibs = self._create_working_wigs(
srna_folder, tex_notex_libs, self.tex_wigs, tracks)
self.flibs = self._create_working_wigs(
srna_folder, frag_libs, self.frag_wigs, tracks)
self.libs = self._check_libs(self.tlibs, self.flibs)
self.replicates_tex = replicates_tex
self.replicates_frag = replicates_frag
self.replicates = self._check_replicates(
replicates_tex, replicates_frag, tex_notex_libs, frag_libs)
self.tex_notex = tex_notex
self.e_nr = blast_e_nr
self.e_srna = blast_e_srna
self.in_cds = detect_sRNA_in_CDS
self.decrease_inter = decrease_intergenic
self.decrease_utr = decrease_utr
self.fuzzy_inter = fuzzy_intergenic
self.fuzzy_utr = fuzzy_utr
self.nr_hits_num = cutoff_nr_hit
self.sorf_file = self._gen_copy_new_folder(
[".gff", ".gff3"], srna_folder, "temp_sorf", sORF, ["--sorf_files"])
self.cutoff_overlap = overlap_percent_CDS
self.terms = self._gen_copy_new_folder(
[".gff", ".gff3"], srna_folder, "temp_term", terminator_files,
["--terminator_files"])
self.fuzzy_b = terminator_fuzzy_in_sRNA
self.fuzzy_a = terminator_fuzzy_out_sRNA
self.hypo = ignore_hypothetical_protein
self.source = TSS_source
self.min_utr = min_utr_coverage
self.promoter_table = self._combine_files(
promoter_tables, srna_folder, "tmp_promoter_table")
if ranking_promoter < 1:
print("Error: --ranking_time_promoter must larger than 1...")
sys.exit()
self.rank_promoter = ranking_promoter
self.promoter_name = promoter_name
self = self._parser_combine_wigs("srna")
if (not TSS_source) and (tex_notex_libs is not None):
self.input_libs = self.tlibs
return self
def container_intersrna(self, file_type, files, args_srna, prefix,
gff_file, tran_file, tss_file, pro_file, fuzzy):
'''Especially for intergenic and antisense sRNA'''
args_srna.file_type = file_type
args_srna.gff_file = gff_file
args_srna.tran_file = tran_file
args_srna.tss_file = tss_file
args_srna.pro_file = pro_file
args_srna.fuzzy = fuzzy
args_srna.prefix = prefix
if file_type == "frag":
args_srna.wig_f_file = os.path.join(
args_srna.frag_path, "_".join([prefix, "forward.wig"]))
args_srna.wig_r_file = os.path.join(
args_srna.frag_path, "_".join([prefix, "reverse.wig"]))
args_srna.wig_folder = args_srna.frag_wigs
args_srna.input_libs = args_srna.flibs
args_srna.output_file = files["frag_gff"]
args_srna.output_table = files["frag_csv"]
args_srna.cutoffs = args_srna.coverage_frag
args_srna.source = args_srna.source
args_srna.cut_notex = args_srna.coverage_frag
args_srna.anti_notex_cutoff = None
else:
args_srna.wig_f_file = os.path.join(
args_srna.tex_path, "_".join([prefix, "forward.wig"]))
args_srna.wig_r_file = os.path.join(
args_srna.tex_path, "_".join([prefix, "reverse.wig"]))
args_srna.wig_folder = args_srna.tex_wigs
args_srna.input_libs = args_srna.tlibs
args_srna.output_file = files["tex_gff"]
args_srna.output_table = files["tex_csv"]
args_srna.cutoffs = args_srna.coverage_tex
args_srna.source = args_srna.source
args_srna.cut_notex = args_srna.coverage_notex
args_srna.anti_notex_cutoff = args_srna.anti_cover_notex
return args_srna
def container_utrsrna(self, gff, tran, tss, files, pro, fasta, file_type,
prefix, args_srna):
'''Especially for UTR-derived sRNA'''
args_srna.file_type = file_type
args_srna.gff_file = gff
args_srna.ta_file = tran
args_srna.tss_file = tss
args_srna.pro_file = pro
args_srna.prefix = prefix
args_srna.seq_file = fasta
if file_type == "frag":
args_srna.wig_f_file = os.path.join(
args_srna.frag_path, "_".join([prefix, "forward.wig"]))
args_srna.wig_r_file = os.path.join(
args_srna.frag_path, "_".join([prefix, "reverse.wig"]))
args_srna.wig_folder = args_srna.frag_wigs
args_srna.input_libs = args_srna.flibs
args_srna.output_file = files["frag_gff"]
args_srna.output_table = files["frag_csv"]
args_srna.utr_coverages = args_srna.utr_frag_cover
args_srna.notex = None
else:
args_srna.wig_f_file = os.path.join(
args_srna.tex_path, "_".join([prefix, "forward.wig"]))
args_srna.wig_r_file = os.path.join(
args_srna.tex_path, "_".join([prefix, "reverse.wig"]))
args_srna.wig_folder = args_srna.tex_wigs
args_srna.input_libs = args_srna.tlibs
args_srna.output_file = files["tex_gff"]
args_srna.output_table = files["tex_csv"]
args_srna.utr_coverages = args_srna.utr_tex_cover
args_srna.notex = args_srna.utr_notex_cover
args_srna.coverages = {"5utr": args_srna.utr_coverages[0],
"3utr": args_srna.utr_coverages[1],
"interCDS": args_srna.utr_coverages[2]}
if args_srna.notex is not None:
args_srna.cover_notex = {"5utr": args_srna.notex[0],
"3utr": args_srna.notex[1],
"interCDS": args_srna.notex[2]}
else:
args_srna.cover_notex = None
return args_srna
def extend_inter_container(self, args_srna, tsss, pros,
nums, output, out_table, texs, detects,
cutoff_coverage, notex):
'''Especially for intergenic and antisense sRNA'''
args_srna.tsss = tsss
args_srna.pros = pros
args_srna.nums = nums
args_srna.output = output
args_srna.out_table = out_table
args_srna.texs = texs
args_srna.detects = detects
args_srna.cutoff_coverage = cutoff_coverage
args_srna.notex = notex
return args_srna
def extend_utr_container(self, args_srna, cdss, tsss, pros,
out, out_t, texs):
'''Especially for UTR-derived sRNA'''
args_srna.cdss = cdss
args_srna.tsss = tsss
args_srna.pros = pros
args_srna.out = out
args_srna.out_t = out_t
args_srna.texs = texs
args_srna.utrs = []
args_srna.srnas = []
return args_srna
def container_sorf(self, sorf_folder, UTR_derived_sORF, transcript_files,
annotation_files, TSS_files, utr_length, min_length,
max_length, cutoff_intergenic_coverage,
cutoff_antisense_coverage, cutoff_5utr_coverage,
cutoff_3utr_coverage, cutoff_interCDS_coverage,
fasta_files, tex_notex_libs, frag_libs, tex_notex,
replicates_tex, replicates_frag, sRNA_files,
start_codon, stop_codon, cutoff_background, rbs_seq,
fuzzy_rbs, rbs_not_after_TSS, print_all_combination,
best_no_sRNA, best_no_TSS, ignore_hypothetical_protein,
min_rbs_distance, max_rbs_distance, extend_3, extend_5,
multi_stop):
self.multi_stop = multi_stop
self.out_folder = sorf_folder
self.rbs_seq = rbs_seq
self.extend_3 = extend_3
self.extend_5 = extend_5
self.utr_detect = UTR_derived_sORF
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], sorf_folder, "temp_ta", transcript_files,
["--transcript_files"])
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], sorf_folder, "temp_anno", annotation_files,
["--annotation_files"])
self.tsss = self._gen_copy_new_folder(
[".gff", ".gff3"], sorf_folder, "temp_tss", TSS_files, ["--tss_files"])
self.utr_length = utr_length
self.min_len = min_length
self.max_len = max_length
self.helper.check_make_folder(os.path.join(sorf_folder, "temp_wig"))
self.tex_wigs = self._create_wig_folder(
os.path.join(sorf_folder, "temp_wig", "tex_notex"),
tex_notex_libs)
self.frag_wigs = self._create_wig_folder(
os.path.join(sorf_folder, "temp_wig", "frag"), frag_libs)
self.cutoff_inter = cutoff_intergenic_coverage
self.cutoff_anti = cutoff_antisense_coverage
self.cutoff_5utr = cutoff_5utr_coverage
self.cutoff_3utr = cutoff_3utr_coverage
self.cutoff_intercds = cutoff_interCDS_coverage
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], sorf_folder,
"temp_fasta", fasta_files, ["--fasta_files"])
tracks = {"file": [], "track": {"+": [], "-": []}}
self.tlibs = self._create_working_wigs(
sorf_folder, tex_notex_libs, self.tex_wigs, tracks)
self.flibs = self._create_working_wigs(
sorf_folder, frag_libs, self.frag_wigs, tracks)
self.libs = self._check_libs(self.tlibs, self.flibs)
self.tex_notex = tex_notex
self.replicates_tex = replicates_tex
self.replicates_frag = replicates_frag
self.replicates = self._check_replicates(
replicates_tex, replicates_frag, tex_notex_libs, frag_libs)
self.srnas = self._gen_copy_new_folder(
[".gff", ".gff3"], sorf_folder, "temp_srna", sRNA_files,
["--srna_files"])
self.start_codon = start_codon
self.stop_codon = stop_codon
self.background = cutoff_background
self.fuzzy_rbs = fuzzy_rbs
self.noafter_tss = rbs_not_after_TSS
self.print_all = print_all_combination
self.no_srna = best_no_sRNA
self.no_tss = best_no_TSS
self.hypo = ignore_hypothetical_protein
self.min_rbs = min_rbs_distance
self.max_rbs = max_rbs_distance
self = self._parser_combine_wigs("sorf")
return self
def container_srna_target(
self, rnaplfold_path, rnaplex_path, rnaup_path, intarna_path,
annotation_files, fasta_files, sRNA_files, query_sRNA, program,
interaction_length, window_size_target, span_target,
window_size_srna, span_srna, unstructured_region_RNAplex_target,
unstructured_region_RNAplex_srna, unstructured_region_RNAup,
energy_threshold, duplex_distance, top, starget_output_folder,
process_rnaplex, process_rnaup, process_intarna, continue_rnaup,
slide_win_srna_intarna, max_loop_srna, slide_win_target_intarna,
max_loop_target, mode_intarna, potential_target_start,
potential_target_end, target_feature):
self.rnaplfold_path = rnaplfold_path
self.rnaplex_path = rnaplex_path
self.rnaup_path = rnaup_path
self.intarna_path = intarna_path
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], starget_output_folder, "tmp_anno",
annotation_files, ["--annotation_files"])
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], starget_output_folder,
"tmp_fasta", fasta_files, ["--fasta_files"])
self.srnas = self._gen_copy_new_folder(
[".gff", ".gff3"], starget_output_folder, "tmp_srna", sRNA_files,
["--srna_files"])
if "all" not in query_sRNA:
for q in query_sRNA:
data = q.split(":")
if (len(data) == 4) and (data[1].isdigit()) and (
data[2].isdigit()):
pass
else:
print("Error: the --query_srna does not be assigned properly!\n")
sys.exit()
self.query = query_sRNA
self.program = program
self.inter_length = interaction_length
self.win_size_t = window_size_target
self.span_t = span_target
self.win_size_s = window_size_srna
self.span_s = span_srna
self.unstr_region_rnaplex_t = unstructured_region_RNAplex_target
self.unstr_region_rnaplex_s = unstructured_region_RNAplex_srna
self.unstr_region_rnaup = unstructured_region_RNAup
self.energy = energy_threshold
self.duplex_dist = duplex_distance
self.top = top
self.out_folder = starget_output_folder
self.core_plex = process_rnaplex
self.core_up = process_rnaup
self.core_inta = process_intarna
self.slide_win_srna = slide_win_srna_intarna
self.slide_win_target = slide_win_target_intarna
self.max_loop_srna = max_loop_srna
self.max_loop_target = max_loop_target
self.mode_intarna = mode_intarna
self.continue_rnaup = continue_rnaup
self.tar_start = potential_target_start
self.tar_end = potential_target_end
self.features = target_feature
return self
def container_goterm(self, annotation_files, goterm_output_folder,
UniProt_id, go_obo, goslim_obo, transcript_files):
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], goterm_output_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.out_folder = goterm_output_folder
self.uniprot = UniProt_id
self.go = go_obo
self.goslim = goslim_obo
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], goterm_output_folder, "tmp_ta", transcript_files,
["--transcript_files"])
return self
def container_sublocal(self, Psortb_path, annotation_files, fasta_files,
bacteria_type, difference_multi,
sublocal_output_folder, transcript_files):
self.psortb_path = Psortb_path
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], sublocal_output_folder, "tmp_anno",
annotation_files, ["--annotation_files"])
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], sublocal_output_folder,
"tmp_fa", fasta_files, ["--fasta_files"])
self.gram = bacteria_type
self.fuzzy = difference_multi
self.out_folder = sublocal_output_folder
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], sublocal_output_folder, "tmp_ta",
transcript_files, ["--transcript_files"])
return self
def container_ppi(self, annotation_files, proteinID_strains,
without_strain_pubmed, species_STRING, score,
ppi_output_folder, node_size, query):
self.ptts = self._gen_copy_new_folder(
[".gff", ".gff3"], ppi_output_folder, "temp_anno",
annotation_files, ["--annotation_files"])
self.strains = proteinID_strains
self.no_specific = without_strain_pubmed
self.species = species_STRING
self.score = score
self.out_folder = ppi_output_folder
self.size = node_size
self.querys = query
return self
def container_promoter(self, MEME_path, GLAM2_path, out_folder, tex_libs,
TSS_files, fasta_files, num_motif, nt_before_TSS,
motif_width, TSS_source, annotation_files, end_run,
combine_all, e_value, para, program, use_tss_type):
self.meme_path = MEME_path
self.glam2_path = GLAM2_path
self.program = program
self.end_run = end_run
if (program.lower() != "both") and (
program.lower() != "meme") and (
program.lower() != "glam2"):
print("Error: Please assign meme or glam2 or both to --program.")
sys.exit()
self.output_folder = out_folder
self.tsss = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_tss", TSS_files, ["--tss_files"])
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], out_folder, "tmp_fasta",
fasta_files, ["--fasta_files"])
self.num_motif = num_motif
self.nt_before = nt_before_TSS
self.widths = motif_width
self.source = TSS_source
self.tex_wigs = None
self.frag_wigs = None
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.combine = combine_all
self.e_value = e_value
self.para = para
self.use_tss = use_tss_type
if not TSS_source:
if annotation_files is None:
print("Error: if --tss_source is False, please assign "
"--annotation_files as well!")
sys.exit()
if tex_libs is not None:
self.helper.check_make_folder(os.path.join(
out_folder, "tmp_wig"))
self.tex_wigs = self._create_wig_folder(
os.path.join(out_folder, "tmp_wig", "tex_notex"),
tex_libs)
tracks = {"file": [], "track": {"+": [], "-": []}}
self.input_libs = self._create_working_wigs(
out_folder, tex_libs, self.tex_wigs, tracks)
self.libs = self.input_libs
self = self._parser_combine_wigs("promoter")
return self
def container_operon(self, TSS_files, annotation_files,
transcript_files,
term_files, TSS_fuzzy, term_fuzzy, min_length,
operon_output_folder, operon_statistics_folder):
self.tsss = self._gen_copy_new_folder(
[".gff", ".gff3"], operon_output_folder, "tmp_tss",
TSS_files, ["--tss_files"])
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], operon_output_folder, "tmp_anno",
annotation_files, ["--annotation_files"])
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], operon_output_folder, "tmp_ta",
transcript_files, ["--transcript_files"])
self.terms = self._gen_copy_new_folder(
[".gff", ".gff3"], operon_output_folder, "tmp_term",
term_files, ["--term_files"])
self.tss_fuzzy = TSS_fuzzy
self.term_fuzzy = term_fuzzy
self.length = min_length
self.output_folder = operon_output_folder
self.stat_folder = operon_statistics_folder
return self
def container_snp(self, samtools_path, bcftools_path, bam_type,
program, fasta_files, bam_files,
quality, read_depth_range, snp_output_folder,
indel_fraction, chrom, rg, caller, filters, DP4_cutoff):
self.samtools_path = samtools_path
self.bcftools_path = bcftools_path
self.types = bam_type
self.program = program
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], snp_output_folder,
"tmp_fa", fasta_files, ["--fasta_files"])
self.bams = bam_files
self.quality = quality
self.depth_s = read_depth_range.split(",")[0]
self.depth_b = read_depth_range.split(",")[-1]
self.out_folder = snp_output_folder
self.idv = indel_fraction.split(",")[0]
self.imf = indel_fraction.split(",")[-1]
if chrom == "haploid":
chrom = "1"
elif chrom == "diploid":
chrom = "2"
self.chrom = chrom
self.rg = rg
self.caller = caller
self.filters = filters
self.dp4_sum = DP4_cutoff.split(",")[0]
self.dp4_frac = DP4_cutoff.split(",")[-1]
return self
def container_circrna(self, process, fasta_files, annotation_files,
bam_files, read_files,
circrna_stat_folder, support_reads, segemehl_path,
testrealign, samtools_path, start_ratio,
end_ratio, ignore_hypothetical_protein, out_folder):
self.cores = process
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], out_folder, "tmp_fa", fasta_files,
["--fasta_files"])
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], out_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.bams = bam_files
self.read_files = read_files
self.stat_folder = circrna_stat_folder
self.support = support_reads
self.segemehl_path = segemehl_path
self.testrealign_path = testrealign
self.samtools_path = samtools_path
self.start_ratio = start_ratio
self.end_ratio = end_ratio
self.hypo = ignore_hypothetical_protein
self.output_folder = out_folder
return self
def container_ribos(self, program, thermo_ID, cmscan_path, cmpress_path,
riboswitch_ID, annotation_files, fasta_files,
tss_files, transcript_files, Rfam, ribos_output_folder,
thermo_output_folder, cutoff, output_all,
database_folder, fuzzy, without_rbs, rbs_seq,
fuzzy_rbs, UTR_length):
self.program = program
self.without_rbs = without_rbs
self.rbs_seq = rbs_seq
if (program.lower() == "riboswitch") or (
program.lower() == "both"):
output = ribos_output_folder
elif (program.lower() == "thermometer"):
output = thermo_output_folder
self.thermo_id = thermo_ID
self.cmscan_path = cmscan_path
self.cmpress_path = cmpress_path
self.ribos_id = riboswitch_ID
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], output, "temp_anno", annotation_files,
["--annotation_files"])
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], output, "temp_fa",
fasta_files, ["--fasta_files"])
self.tsss = self._gen_copy_new_folder(
[".gff", ".gff3"], output, "temp_tss", tss_files, ["--tss_files"])
self.trans = self._gen_copy_new_folder(
[".gff", ".gff3"], output, "temp_ta", transcript_files,
["--transcript_files"])
self.rfam = Rfam
self.ribos_out_folder = ribos_output_folder
self.thermo_out_folder = thermo_output_folder
self.cutoff = cutoff
self.output_all = output_all
self.database = database_folder
self.fuzzy = fuzzy
self.fuzzy_rbs = fuzzy_rbs
self.utr = UTR_length
return self
def container_cris(self, fasta_files, annotation_files, CRT_path,
window_size, min_number_repeat, min_length_repeat,
Max_length_repeat, min_length_spacer, Max_length_spacer,
cris_out_folder, ignore_hypo):
self.gffs = self._gen_copy_new_folder(
[".gff", ".gff3"], cris_out_folder, "tmp_anno", annotation_files,
["--annotation_files"])
self.fastas = self._gen_copy_new_folder(
[".fa", ".fna", ".fasta"], cris_out_folder, "tmp_fa",
fasta_files, ["--fasta_files"])
self.crt_path = CRT_path
self.win_size = window_size
self.out_folder = cris_out_folder
self.min_num_r = min_number_repeat
self.min_len_r = min_length_repeat
self.max_len_r = Max_length_repeat
self.min_len_s = min_length_spacer
self.max_len_s = Max_length_spacer
self.ignore_hypo = ignore_hypo
return self
def container_screen(self, main_gff, side_gffs, fasta, height, tex_libs,
frag_libs, present, output_folder):
self.main_gff = main_gff
self.side_gffs = side_gffs
self.fasta = fasta
self.height = height
self.tlibs = tex_libs
self.flibs = frag_libs
self.present = present
self.output_folder = output_folder
return self | ANNOgesic | /ANNOgesic-1.1.14.linux-x86_64.tar.gz/usr/local/lib/python3.10/dist-packages/annogesiclib/args_container.py | args_container.py |
import os
import sys
import shutil
from subprocess import call
from annogesiclib.helper import Helper
from annogesiclib.multiparser import Multiparser
from annogesiclib.converter import Converter
from annogesiclib.get_inter_seq import intergenic_seq
from annogesiclib.extract_sec_info import extract_info_sec
from annogesiclib.get_polyT import poly_t
from annogesiclib.detect_coverage_term import detect_coverage
from annogesiclib.gff3 import Gff3Parser
from annogesiclib.stat_term import stat_term
from annogesiclib.compare_tran_term import compare_term_tran
from annogesiclib.reorganize_table import reorganize_table
class Terminator(object):
'''detection of terminator'''
def __init__(self, args_term):
self.multiparser = Multiparser()
self.helper = Helper()
self.converter = Converter()
self.gff_parser = Gff3Parser()
self.gff_path = os.path.join(args_term.gffs, "tmp")
self.fasta_path = os.path.join(args_term.fastas, "tmp")
self.tran_path = os.path.join(args_term.trans, "tmp")
self.outfolder = {"term": os.path.join(args_term.out_folder, "gffs"),
"csv": os.path.join(args_term.out_folder, "tables")}
self.terms = {"all": os.path.join(self.outfolder["term"],
"all_candidates"),
"express": os.path.join(self.outfolder["term"],
"expressed_candidates"),
"best": os.path.join(self.outfolder["term"],
"best_candidates"),
"non": os.path.join(self.outfolder["term"],
"non_expressed_candidates")}
self.csvs = {"all": os.path.join(self.outfolder["csv"],
"all_candidates"),
"express": os.path.join(self.outfolder["csv"],
"expressed_candidates"),
"best": os.path.join(self.outfolder["csv"],
"best_candidates"),
"non": os.path.join(self.outfolder["csv"],
"non_expressed_candidates")}
self.combine_path = os.path.join(self.gff_path, "combine")
self.tmps = {"transterm": os.path.join(os.getcwd(), "tmp_transterm"),
"hp": "transtermhp", "hp_gff": "transtermhp.gff",
"hp_path": "tmp_transterm/tmp",
"term_table": os.path.join(os.getcwd(), "tmp_term_table"),
"merge": os.path.join(os.getcwd(), "tmp_merge_gff"),
"gff": "tmp.gff",
"folder": os.path.join(os.getcwd(), "tmp")}
self.suffixs = {"gff": "term.gff", "csv": "term.csv",
"allgff": "term_all.gff"}
if args_term.srnas:
self.srna_path = os.path.join(args_term.srnas, "tmp")
else:
self.srna_path = None
self._make_gff_folder()
def _combine_annotation(self, combine_file, files):
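        '''Concatenate the coordinate sections of the given .ptt/.rnt files
        into one file as TransTermHP input. Returns "NO_CDS" if a .ptt
        file contains no CDS information, otherwise "Normal".'''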
with open(combine_file, 'w') as result:
for file_ in files:
if (file_.endswith(".ptt")) and (os.stat(file_).st_size == 0):
print("Warning: No CDS information, "
"TransTermHP can not work!")
return "NO_CDS"
if os.path.exists(file_) and (
os.stat(file_).st_size != 0):
check_start = False
fh = open(file_, 'r')
for line in fh:
if check_start:
result.write(line)
if "Location" in line:
check_start = True
if "\n" not in line:
result.write("\n")
fh.close()
return "Normal"
def _make_gff_folder(self):
self.helper.check_make_folder(self.terms["all"])
self.helper.check_make_folder(self.csvs["all"])
self.helper.check_make_folder(self.terms["best"])
self.helper.check_make_folder(self.csvs["best"])
self.helper.check_make_folder(self.terms["express"])
self.helper.check_make_folder(self.csvs["express"])
self.helper.check_make_folder(self.terms["non"])
self.helper.check_make_folder(self.csvs["non"])
def _convert_gff2rntptt(self, gff_path, fasta_path, sRNAs, log):
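        '''Convert the gff annotation (and the sRNA gff, if assigned) of
        every genome into .ptt/.rnt files for TransTermHP. Returns the
        file type ("srna" or "normal") and the prefix of every genome.'''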
file_types = {}
prefixs = []
for gff in os.listdir(gff_path):
if gff.endswith(".gff"):
filename = gff.split("/")
prefix = filename[-1][:-4]
prefixs.append(prefix)
gff_file = os.path.join(gff_path, gff)
rnt_file = os.path.join(gff_path, gff.replace(".gff", ".rnt"))
ptt_file = os.path.join(gff_path, gff.replace(".gff", ".ptt"))
fasta = self.helper.get_correct_file(
fasta_path, ".fa", prefix, None, None)
if not fasta:
log.write("{0}.fa can not be found.\n".format(prefix))
print("Error: {0}.fa can not be found!".format(prefix))
sys.exit()
if sRNAs:
self.multiparser.parser_gff(sRNAs, "sRNA")
srna = self.helper.get_correct_file(
self.srna_path, "_sRNA.gff", prefix, None, None)
if (srna) and (fasta):
log.write("Running converter.py to convert {0} and "
"{1} to {2}, {3}, and {4}.\n".format(
gff_file, srna, ptt_file, rnt_file,
srna.replace(".gff", ".rnt")))
self.converter.convert_gff2rntptt(
gff_file, prefix, fasta, ptt_file, rnt_file, srna,
srna.replace(".gff", ".rnt"))
file_types[prefix] = "srna"
log.write("The following files are generated:\n")
log.write("\t{0}\n\t{1}\n\t{2}\n".format(
ptt_file, rnt_file, srna.replace(".gff", ".rnt")))
if (not srna) and (fasta):
log.write("Running converter.py to convert {0} "
"to {1}, and {2}.\n".format(
gff_file, ptt_file, rnt_file))
self.converter.convert_gff2rntptt(
gff_file, prefix, fasta, ptt_file, rnt_file, None, None)
file_types[prefix] = "normal"
log.write("The following files are generated:\n")
log.write("\t{0}\n\t{1}\n".format(ptt_file, rnt_file))
else:
log.write("Running converter.py to convert {0} "
"to {1}, and {2}.\n".format(
gff_file, ptt_file, rnt_file))
self.converter.convert_gff2rntptt(
gff_file, prefix, fasta, ptt_file, rnt_file, None, None)
file_types[prefix] = "normal"
log.write("The following files are generated:\n")
log.write("\t{0}\n\t{1}\n".format(ptt_file, rnt_file))
return file_types, prefixs
def _combine_ptt_rnt(self, gff_path, file_types, srna_path):
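        '''Merge the .ptt, .rnt and, if available, sRNA .rnt files of every
        genome into one combined .ptt file for TransTermHP.'''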
self.helper.check_make_folder(self.combine_path)
for prefix, file_type in file_types.items():
combine_file = os.path.join(self.combine_path, prefix + '.ptt')
if file_type == "normal":
files = [os.path.join(gff_path, prefix + ".ptt"),
os.path.join(gff_path, prefix + ".rnt")]
check = self._combine_annotation(combine_file, files)
elif file_type == "srna":
files = [os.path.join(gff_path, prefix + ".ptt"),
os.path.join(gff_path, prefix + ".rnt"),
os.path.join(srna_path,
"_".join([prefix, "sRNA.rnt"]))]
check = self._combine_annotation(combine_file, files)
return check
def _TransTermHP(self, fasta, file_, out_path, prefix, out, args_term, log):
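        # Run TransTermHP on one genome and write the tail-to-tail (.t2t)
        # and best-terminator (.bag) results into the per-genome folder.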
call([args_term.TransTermHP_path, "-p", args_term.expterm_path,
fasta, os.path.join(self.combine_path, file_), "--t2t-perf",
os.path.join(out_path, "_".join([
prefix,
"terminators_within_robust_tail-to-tail_regions.t2t"])),
"--bag-output", os.path.join(out_path, "_".join([
prefix, "best_terminator_after_gene.bag"]))],
stdout=out)
log.write(" ".join([args_term.TransTermHP_path, "-p", args_term.expterm_path,
fasta, os.path.join(self.combine_path, file_), "--t2t-perf",
os.path.join(out_path, "_".join([
prefix,
"terminators_within_robust_tail-to-tail_regions.t2t"])),
"--bag-output", os.path.join(out_path, "_".join([
prefix, "best_terminator_after_gene.bag"]))]) + "\n")
def _run_TransTermHP(self, args_term, log):
self.helper.check_make_folder(self.tmps["transterm"])
log.write("Running TransTermHP.\n")
log.write("Make sure the version is at least 2.09.\n")
for file_ in os.listdir(self.combine_path):
if ".ptt" in file_:
prefix = file_.replace(".ptt", "")
fasta = self.helper.get_correct_file(
self.fasta_path, ".fa", prefix, None, None)
if not fasta:
log.write("{0}.fa can not be found!.\n".format(prefix))
print("Error: {0}.fa can not be found!".format(prefix))
sys.exit()
out_path = os.path.join(args_term.hp_folder, prefix)
self.helper.check_make_folder(out_path)
out = open(os.path.join(out_path,
"_".join([prefix, "terminators.txt"])), "w")
self._TransTermHP(fasta, file_, out_path,
prefix, out, args_term, log)
log.write("Done!\n")
log.write("The following files are generated in {0}.\n".format(
out_path))
for file_ in os.listdir(out_path):
log.write("\t" + file_ + "\n")
out.close()
shutil.rmtree(self.combine_path)
def _convert_to_gff(self, prefixs, args_term, log):
log.write("Running coverter.py to convert the results of TransTermHP "
"to gff3 format.\n")
for prefix in prefixs:
for folder in os.listdir(args_term.hp_folder):
if prefix == folder:
out_path = os.path.join(args_term.hp_folder, folder)
for file_ in os.listdir(out_path):
if file_.endswith(".bag"):
out_file = os.path.join(
self.tmps["transterm"],
"_".join([prefix, self.tmps["hp_gff"]]))
self.converter.convert_transtermhp2gff(
os.path.join(out_path, file_), out_file)
log.write("\t" + out_file + " is generated.\n")
self.multiparser.combine_gff(args_term.gffs, self.tmps["transterm"],
None, self.tmps["hp"])
def _combine_wigs(self, args_term):
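        '''Merge the TEX+/- and fragmented wiggle files into one folder; if
        only one library type is assigned, its folder is used directly.'''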
if (args_term.tex_wigs is not None) and (
args_term.frag_wigs is not None):
folder = args_term.tex_wigs.split("/")
folder = "/".join(folder[:-1])
merge_wigs = os.path.join(folder, "merge_wigs")
self.helper.check_make_folder(merge_wigs)
for wig in os.listdir(args_term.tex_wigs):
if os.path.isdir(os.path.join(args_term.tex_wigs, wig)):
pass
else:
shutil.copy(os.path.join(args_term.tex_wigs, wig),
merge_wigs)
for wig in os.listdir(args_term.frag_wigs):
if os.path.isdir(os.path.join(args_term.frag_wigs, wig)):
pass
else:
shutil.copy(os.path.join(args_term.frag_wigs, wig),
merge_wigs)
elif (args_term.tex_wigs is not None):
merge_wigs = args_term.tex_wigs
elif (args_term.frag_wigs is not None):
merge_wigs = args_term.frag_wigs
else:
print("Error: Wiggle files are not assigned!")
sys.exit()
return merge_wigs
def _merge_sRNA(self, sRNAs, prefixs, gff_path):
        '''Merge the sRNA annotations into the gff files so that terminators
        can also be associated with sRNAs.'''
if sRNAs is not None:
self.multiparser.parser_gff(sRNAs, "sRNA")
self.helper.check_make_folder(self.tmps["merge"])
for prefix in prefixs:
tmp_gff = os.path.join(self.tmps["merge"], self.tmps["gff"])
if self.tmps["gff"] in os.listdir(self.tmps["merge"]):
os.remove(tmp_gff)
self.helper.merge_file(os.path.join(gff_path, prefix + ".gff"),
tmp_gff)
self.helper.merge_file(os.path.join(
self.srna_path, "_".join([prefix, "sRNA.gff"])), tmp_gff)
self.helper.sort_gff(tmp_gff, os.path.join(
self.tmps["merge"], prefix + ".gff"))
os.remove(tmp_gff)
merge_path = self.tmps["merge"]
else:
merge_path = gff_path
return merge_path
def _move_file(self, term_outfolder, csv_outfolder):
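        '''Sort the terminator gff files, move them to the all_candidates
        folders and merge the raw coverage tables into one csv table per
        genome.'''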
for gff in os.listdir(term_outfolder):
if gff.endswith("_term.gff"):
self.helper.sort_gff(os.path.join(term_outfolder, gff),
self.tmps["gff"])
shutil.move(self.tmps["gff"],
os.path.join(term_outfolder, gff))
prefix = gff.replace("_term.gff", "")
new_gff = os.path.join(self.terms["all"], "_".join([
prefix, self.suffixs["allgff"]]))
csv_file = os.path.join(
os.path.join(self.csvs["all"], "_".join([
prefix, self.suffixs["csv"]])))
out = open(new_gff, "w")
out.write("##gff-version 3\n")
out.close()
self.helper.merge_file(
os.path.join(term_outfolder, gff),
os.path.join(
self.terms["all"], "_".join([
prefix, self.suffixs["allgff"]])))
os.remove(os.path.join(term_outfolder, gff))
pre_strain = ""
if ("_".join([prefix, self.suffixs["csv"]]) in
os.listdir(self.csvs["all"])):
os.remove(csv_file)
out_csv = open(csv_file, "w")
out_csv.write("\t".join(["Genome", "Name", "Start", "End",
"Strand", "Detect", "Coverage_decrease",
"Coverage_detail"]) + "\n")
out_csv.close()
fh = open(new_gff)
for entry in self.gff_parser.entries(fh):
if entry.seq_id != pre_strain:
self.helper.merge_file(os.path.join(
self.tmps["term_table"], "_".join([
entry.seq_id, "term_raw.csv"])),
os.path.join(self.csvs["all"], "_".join([
prefix, self.suffixs["csv"]])))
pre_strain = entry.seq_id
fh.close()
def _run_rnafold(self, RNAfold_path, tmp_seq, tmp_sec, prefix, log):
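        '''Fold the extracted intergenic sequences with RNAfold to get
        their secondary structures.'''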
log.write("Computing secondray structures of {0}.\n".format(prefix))
log.write("Make sure the version of Vienna RNA package is at least 2.3.2.\n")
print("Computing secondray structures of {0}".format(prefix))
self.helper.check_make_folder(self.tmps["folder"])
pre_cwd = os.getcwd()
os.chdir(self.tmps["folder"])
log.write(" ".join([RNAfold_path, "<", os.path.join("..", tmp_seq),
">", os.path.join("..", tmp_sec)]) + "\n")
os.system(" ".join([RNAfold_path, "<", os.path.join("..", tmp_seq),
">", os.path.join("..", tmp_sec)]))
log.write("Done!\n")
log.write("\t" + tmp_sec + " is generated for storing secondary "
"structure.\n")
os.chdir(pre_cwd)
shutil.rmtree(self.tmps["folder"])
def _compute_intersection_forward_reverse(
self, prefixs, merge_path, wig_path, merge_wigs, args_term, log):
        '''Search for terminators located in gene-converged regions.'''
        log.write("Searching for terminators located in gene-converged "
                  "regions.\n")
for prefix in prefixs:
tmp_seq = os.path.join(args_term.out_folder,
"_".join(["inter_seq", prefix]))
tmp_index = os.path.join(args_term.out_folder,
"_".join(["inter_index", prefix]))
tmp_sec = os.path.join(args_term.out_folder,
"_".join(["inter_sec", prefix]))
tran_file = os.path.join(self.tran_path,
"_".join([prefix, "transcript.gff"]))
gff_file = os.path.join(merge_path, prefix + ".gff")
            tmp_cand = os.path.join(args_term.out_folder,
                                    "_".join(["term_candidates", prefix]))
if os.path.exists(tran_file):
print("Extracting sequences of {0}".format(prefix))
log.write("Running get_inter_seq.py to extract the potential "
"sequences from {0}.\n".format(prefix))
intergenic_seq(os.path.join(self.fasta_path, prefix + ".fa"),
tran_file, gff_file, tmp_seq, tmp_index, args_term)
log.write("\t" + tmp_seq + " is generated for storing the "
"potential sequences.\n")
self._run_rnafold(args_term.RNAfold_path, tmp_seq, tmp_sec,
prefix, log)
log.write("Running extract_sec_info.py to extract the "
"information of secondary structure from {0}.\n".format(
prefix))
extract_info_sec(tmp_sec, tmp_seq, tmp_index)
os.remove(tmp_index)
log.write("Running get_polyT.py to detect the "
"terminator candidates for {0}.\n".format(prefix))
poly_t(tmp_seq, tmp_sec, gff_file, tran_file, tmp_cand, args_term)
log.write("\t" + tmp_cand + " which temporary stores terminator "
"candidates is generated.\n")
print("Detecting terminators for " + prefix)
log.write("Running detect_coverage_term.py to gain "
"high-confidence terminators for {0}.\n".format(prefix))
detect_coverage(
tmp_cand, os.path.join(merge_path, prefix + ".gff"),
os.path.join(self.tran_path, "_".join([
prefix, "transcript.gff"])),
os.path.join(self.fasta_path, prefix + ".fa"),
os.path.join(wig_path, "_".join([prefix, "forward.wig"])),
os.path.join(wig_path, "_".join([prefix, "reverse.wig"])),
os.path.join(self.tmps["hp_path"], "_".join([
prefix, self.tmps["hp_gff"]])), merge_wigs,
os.path.join(self.outfolder["term"], "_".join([
prefix, self.suffixs["gff"]])),
os.path.join(self.tmps["term_table"], "_".join([
prefix, "term_raw.csv"])), args_term)
self.multiparser.combine_gff(args_term.gffs, self.outfolder["term"],
None, "term")
self._move_file(self.outfolder["term"], self.outfolder["csv"])
def _remove_tmp_file(self, merge_wigs, args_term):
self.helper.remove_tmp_dir(args_term.gffs)
self.helper.remove_tmp_dir(args_term.fastas)
if args_term.srnas is not None:
self.helper.remove_tmp(args_term.srnas)
shutil.rmtree(self.tmps["merge"])
if (args_term.tex_wigs is not None) and (
args_term.frag_wigs is not None):
shutil.rmtree(merge_wigs)
self.helper.remove_tmp_dir(args_term.trans)
if "tmp_wig" in os.listdir(args_term.out_folder):
shutil.rmtree(os.path.join(args_term.out_folder, "tmp_wig"))
self.helper.remove_tmp(self.outfolder["term"])
shutil.rmtree(self.tmps["transterm"])
shutil.rmtree(self.tmps["term_table"])
self.helper.remove_all_content(args_term.out_folder,
"inter_seq_", "file")
self.helper.remove_all_content(self.outfolder["term"],
"_term.gff", "file")
self.helper.remove_all_content(args_term.out_folder,
"inter_sec_", "file")
self.helper.remove_all_content(args_term.out_folder,
"term_candidates_", "file")
def _compute_stat(self, args_term, log):
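        '''Assign IDs and names to the terminator candidates, generate the
        statistics and split the results into all, best, expressed and
        non-expressed candidates.'''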
new_prefixs = []
for gff in os.listdir(self.terms["all"]):
if gff.endswith("_term_all.gff"):
out_tmp = open(self.tmps["gff"], "w")
out_tmp.write("##gff-version 3\n")
new_prefix = gff.replace("_term_all.gff", "")
new_prefixs.append(gff.replace("_term_all.gff", ""))
num = 0
fh = open(os.path.join(self.terms["all"], gff))
for entry in self.gff_parser.entries(fh):
name = '%0*d' % (5, num)
entry.attributes["ID"] = (
entry.seq_id + "_terminator" + str(num))
entry.attributes["Name"] = "_".join(["terminator_" + name])
entry.attribute_string = ";".join([
"=".join(items) for items in entry.attributes.items()])
out_tmp.write("\t".join([entry.info_without_attributes,
entry.attribute_string]) + "\n")
num += 1
out_tmp.close()
fh.close()
shutil.move(self.tmps["gff"], os.path.join(self.terms["all"],
"_".join([new_prefix, self.suffixs["gff"]])))
log.write("Running stat_term.py to do statistics.\n")
stat_path = os.path.join(args_term.out_folder, "statistics")
log.write("The following files are generated:\n")
for prefix in new_prefixs:
stat_term(os.path.join(self.terms["all"],
"_".join([prefix, self.suffixs["gff"]])),
os.path.join(self.csvs["all"],
"_".join([prefix, self.suffixs["csv"]])),
os.path.join(stat_path,
"_".join(["stat", prefix + ".csv"])),
os.path.join(self.terms["best"],
"_".join([prefix, "term"])),
os.path.join(self.terms["express"],
"_".join([prefix, "term"])),
os.path.join(self.terms["non"],
"_".join([prefix, "term"])))
shutil.move(os.path.join(self.terms["best"],
"_".join([prefix, self.suffixs["csv"]])),
os.path.join(self.csvs["best"],
"_".join([prefix, self.suffixs["csv"]])))
shutil.move(os.path.join(self.terms["express"],
"_".join([prefix, self.suffixs["csv"]])),
os.path.join(self.csvs["express"],
"_".join([prefix, self.suffixs["csv"]])))
shutil.move(os.path.join(self.terms["non"],
"_".join([prefix, self.suffixs["csv"]])),
os.path.join(self.csvs["non"],
"_".join([prefix, self.suffixs["csv"]])))
os.remove(os.path.join(self.terms["all"],
"_".join([prefix, self.suffixs["allgff"]])))
log.write("\t" + os.path.join(self.terms["all"],
"_".join([prefix, self.suffixs["gff"]])) + "\n")
log.write("\t" + os.path.join(self.terms["best"],
"_".join([prefix, self.suffixs["gff"]])) + "\n")
log.write("\t" + os.path.join(self.terms["express"],
"_".join([prefix, self.suffixs["gff"]])) + "\n")
log.write("\t" + os.path.join(self.terms["non"],
"_".join([prefix, self.suffixs["gff"]])) + "\n")
log.write("\t" + os.path.join(self.csvs["all"],
"_".join([prefix, self.suffixs["csv"]])) + "\n")
log.write("\t" + os.path.join(stat_path,
"_".join(["stat", prefix + ".csv"])) + "\n")
log.write("\t" + os.path.join(self.csvs["best"],
"_".join([prefix, self.suffixs["csv"]])) + "\n")
log.write("\t" + os.path.join(self.csvs["express"],
"_".join([prefix, self.suffixs["csv"]])) + "\n")
log.write("\t" + os.path.join(self.csvs["non"],
"_".join([prefix, self.suffixs["csv"]])) + "\n")
def _check_gff_file(self, folder):
for file_ in os.listdir(folder):
if file_.endswith(".gff"):
self.helper.check_uni_attributes(os.path.join(folder, file_))
def _compare_term_tran(self, args_term, prefixs, log):
        '''Search for the terminators associated with transcripts.'''
self.multiparser.combine_gff(args_term.gffs, self.tran_path,
None, "transcript")
prefixs = []
print("Comparing terminators with transcripts now")
for file_ in os.listdir(self.tran_path):
if file_.endswith("_transcript.gff"):
prefixs.append(file_.replace("_transcript.gff", ""))
log.write("Running compare_tran_term.py for comparing transcripts "
"and terminators.\n")
log.write("The following files are generated:\n")
for type_ in ("best_candidates", "expressed_candidates",
"all_candidates"):
compare_term_tran(self.tran_path,
os.path.join(self.outfolder["term"], type_),
args_term.fuzzy_up_ta, args_term.fuzzy_down_ta,
args_term.out_folder, "terminator",
self.outfolder["term"], args_term.trans)
for prefix in prefixs:
shutil.move(
os.path.join(
args_term.out_folder, "statistics",
"stat_compare_transcript_terminator_" + prefix + ".csv"),
os.path.join(
args_term.out_folder, "statistics",
"_".join(["stat_compare_terminator_transcript", prefix,
type_ + ".csv"])))
log.write("\t" + os.path.join(
args_term.out_folder, "statistics",
"_".join(["stat_compare_terminator_transcript", prefix,
type_ + ".csv"])) + "\n")
def _re_table(self, args_term, prefixs, log):
log.write("Running re_table.py to generate coverage information.\n")
log.write("The following files are updated:\n")
for type_ in ["all_candidates", "best_candidates",
"expressed_candidates", "non_expressed_candidates"]:
for table in os.listdir(os.path.join(
args_term.out_folder, "tables", type_)):
term_table = os.path.join(args_term.out_folder, "tables",
type_, table)
reorganize_table(args_term.libs, args_term.merge_wigs,
"Coverage_detail", term_table)
log.write("\t" + term_table + "\n")
def run_terminator(self, args_term, log):
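        '''Terminator detection workflow: convert the annotations, run
        TransTermHP, detect coverage-supported terminators in gene-converged
        regions, and generate statistics and the comparison with
        transcripts.'''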
self._check_gff_file(args_term.gffs)
self._check_gff_file(args_term.trans)
self.multiparser.parser_fasta(args_term.fastas)
if (not args_term.gffs) or (not args_term.fastas):
print("Error: Please assign gff files "
"and fasta files!")
sys.exit()
file_types, prefixs = self._convert_gff2rntptt(
self.gff_path, self.fasta_path, args_term.srnas, log)
check = self._combine_ptt_rnt(self.gff_path, file_types,
self.srna_path)
self._run_TransTermHP(args_term, log)
self._convert_to_gff(prefixs, args_term, log)
self.helper.remove_tmp(self.gff_path)
self.multiparser.parser_gff(args_term.trans, "transcript")
self.helper.check_make_folder(self.tmps["term_table"])
if check != "NO_CDS":
self.multiparser.parser_gff(self.tmps["transterm"],
self.tmps["hp"])
merge_path = self._merge_sRNA(args_term.srnas, prefixs, self.gff_path)
self._compute_intersection_forward_reverse(
prefixs, merge_path, args_term.wig_path,
args_term.merge_wigs, args_term, log)
self._compute_stat(args_term, log)
self._compare_term_tran(args_term, prefixs, log)
self._re_table(args_term, prefixs, log)
self._remove_tmp_file(args_term.merge_wigs, args_term) | ANNOgesic | /ANNOgesic-1.1.14.linux-x86_64.tar.gz/usr/local/lib/python3.10/dist-packages/annogesiclib/terminator.py | terminator.py |
import os
import sys
from glob import glob
from annogesiclib.gff3 import Gff3Parser
from annogesiclib.parser_wig import WigParser
def load_wigs(out, lib_t, lib_n, lib_f):
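    '''Write an IGV "load" command for every assigned wiggle file.'''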
if lib_t and lib_n:
for index in range(len(lib_t)):
out.write("load {0}\n".format(
os.path.join(os.getcwd(), lib_t[index])))
out.write("load {0}\n".format(
os.path.join(os.getcwd(), lib_n[index])))
elif lib_t:
for lib in lib_t:
out.write("load {0}\n".format(os.path.join(os.getcwd(), lib)))
elif lib_n:
for lib in lib_n:
out.write("load {0}\n".format(os.path.join(os.getcwd(), lib)))
if lib_f:
for lib in lib_f:
out.write("load {0}\n".format(os.path.join(os.getcwd(), lib)))
def set_data_range(out, gff, wigs, strand):
'''set and print the DataRange'''
max_range = 0
for strains in wigs.values():
for strain, wig_datas in strains.items():
if strain == gff.seq_id:
for wig in wig_datas[(gff.start - 1): gff.end]:
if max_range < wig.coverage:
max_range = wig.coverage
    max_range = int(max_range) + 10
if strand == "+":
out.write("setDataRange 0,{0}\n".format(max_range))
else:
max_range = max_range * -1
out.write("setDataRange 0,{0}\n".format(max_range))
def print_batch(args_sc, out, strand, lib_t, lib_n, lib_f, strain):
'''print the batch file'''
out.write("new\n")
out.write("genome {0}\n".format(os.path.join(os.getcwd(), args_sc.fasta)))
out.write("load {0}\n".format(os.path.join(os.getcwd(), args_sc.main_gff)))
gff = args_sc.main_gff.split("/")
out.write("{0} {1}\n".format(args_sc.present, gff[-1]))
if args_sc.side_gffs is not None:
for files in args_sc.side_gffs:
for filename in glob(files):
out.write("load {0}\n".format(os.path.join(os.getcwd(), filename)))
gff = filename.split("/")
out.write("{0} {1}\n".format(args_sc.present, gff[-1]))
load_wigs(out, lib_t, lib_n, lib_f)
out.write("maxPanelHeight {0}\n".format(args_sc.height))
if strand == "+":
out.write("snapshotDirectory {0}\n".format(
os.path.join(os.getcwd(), args_sc.output_folder,
strain, "forward")))
else:
out.write("snapshotDirectory {0}\n".format(
os.path.join(os.getcwd(), args_sc.output_folder,
strain, "reverse")))
def import_wig(lib, wigs, strand):
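    '''Parse the coverage of the given wiggle files, grouped per wiggle
    file and genome.'''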
wig_parser = WigParser()
for wig in lib:
wigs[wig] = {}
strain = ""
wig_fh = open(wig)
for entry in wig_parser.parser(wig_fh, strand):
if strain != entry.strain:
wigs[wig][entry.strain] = []
strain = entry.strain
wigs[wig][strain].append(entry)
wig_fh.close()
def gen_batch(lib_t, lib_n, lib_f, strand, gffs, out, seq):
'''generate the batch file'''
wigs = {}
if lib_t and lib_n:
import_wig(lib_t, wigs, strand)
import_wig(lib_n, wigs, strand)
elif lib_t:
import_wig(lib_t, wigs, strand)
elif lib_n:
import_wig(lib_n, wigs, strand)
if lib_f:
import_wig(lib_f, wigs, strand)
if strand == "+":
print("Printing the forward batch files...")
else:
print("Printing the reverse batch files...")
for gff in gffs:
if gff.seq_id not in seq.keys():
print("Error: The genome names in fasta file "
"and gff file are different!!")
sys.exit()
if (gff.start - 200) <= 0:
start = 1
else:
start = gff.start - 200
if (gff.end + 200) >= len(seq[gff.seq_id]):
end = len(seq[gff.seq_id])
else:
end = gff.end + 200
out.write("goto {0}:{1}-{2}\n".format(
gff.seq_id, start, end))
set_data_range(out, gff, wigs, strand)
out.write("snapshot {0}:{1}-{2}.png\n".format(
gff.seq_id, gff.start, gff.end))
def get_length(fasta_file):
'''read the FASTA file and return the sequence of each replicon
(used for getting the sequence lengths)'''
seq = {}
with open(fasta_file) as fh:
for line in fh:
line = line.strip()
if line.startswith(">"):
strain = line[1:]
seq[strain] = ""
else:
seq[strain] = seq[strain] + line
return seq
def gen_screenshot(args_sc, libs, forward_file, reverse_file, strain):
'''Generate IGV batch files for taking screenshots that the user can review'''
gffs_f = []
gffs_r = []
fh = open(args_sc.main_gff)
for entry in Gff3Parser().entries(fh):
if entry.strand == "+":
gffs_f.append(entry)
else:
gffs_r.append(entry)
gffs_f = sorted(gffs_f, key=lambda k: (k.seq_id, k.start, k.end, k.strand))
gffs_r = sorted(gffs_r, key=lambda k: (k.seq_id, k.start, k.end, k.strand))
out_f = open(forward_file, "w")
print_batch(args_sc, out_f, "+", libs["ft"],
libs["fn"], libs["ff"], strain)
out_r = open(reverse_file, "w")
print_batch(args_sc, out_r, "-", libs["rt"],
libs["rn"], libs["rf"], strain)
seq = get_length(args_sc.fasta)
gen_batch(libs["ft"], libs["fn"], libs["ff"], "+", gffs_f, out_f, seq)
gen_batch(libs["rt"], libs["rn"], libs["rf"], "-", gffs_r, out_r, seq)
fh.close()
out_f.close()
out_r.close()
# end of annogesiclib/gen_screenshots.py
import csv
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
plt.style.use('ggplot')
def plot(subs, total, unknown, strain, prefix_name):
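# draw two bar charts of the localization counts: the upper panel includes the
# 'Unknown' class (always placed last), the lower panel excludes it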
nums = []
nums_no_unknown = []
classes = []
classes_no_unknown = []
width = 0.4
tmp_unknown = ["Unknown", 0]
sort_subs = sorted(subs.items(),
key=lambda x: (x[1]), reverse=True)
for datas in sort_subs:
if datas[0] == "Unknown":
tmp_unknown = datas
else:
nums.append(datas[1])
nums_no_unknown.append(datas[1])
classes.append(datas[0])
classes_no_unknown.append(datas[0])
nums.append(tmp_unknown[1])
classes.append(tmp_unknown[0])
plt.figure(figsize=(12, 16))
plt.subplot(211)
ind = np.arange(len(nums))
plt.bar(ind, nums, width, color='#FF9999')
plt.title('Subcellular localization including Unknown\n', fontsize=24)
plt.ylabel('Amount', fontsize=20)
plt.yticks(fontsize=16)
plt.xlim([0, len(nums) + 1])
plt.xticks(ind+width, classes, rotation=40, fontsize=20, ha='right')
plt.tight_layout(pad=2)
plt.subplot(212)
ind = np.arange(len(nums_no_unknown))
plt.bar(ind, nums_no_unknown, width, color='#FF9999')
plt.title('Subcellular localization excluding Unknown\n', fontsize=24)
plt.ylabel('Amount', fontsize=20)
plt.xlim([0, len(nums_no_unknown) + 1])
plt.xticks(ind+width, classes_no_unknown, rotation=40,
fontsize=20, ha='right')
plt.yticks(fontsize=16)
plt.tight_layout(pad=2)
plt.savefig("_".join([prefix_name, strain, "sublocal.png"]))
def read_table(psortb_file):
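# count the PSORTb localization calls (row[5]) per genome and for all genomes
# combined, with separate counters for the 'Unknown' class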
subs = {}
subs["all_genome"] = {}
total_nums = {}
total_nums["all_genome"] = 0
unknown_nums = {}
unknown_nums["all_genome"] = 0
pre_strain = ""
f_h = open(psortb_file, "r")
for row in csv.reader(f_h, delimiter="\t"):
if not row[0].startswith("#"):
if pre_strain != row[0]:
subs[row[0]] = {}
pre_strain = row[0]
total_nums[row[0]] = 0
unknown_nums[row[0]] = 0
if row[5] not in subs[row[0]].keys():
if row[5] == "Unknown":
unknown_nums[row[0]] += 1
subs[row[0]][row[5]] = 1
total_nums[row[0]] += 1
else:
if row[5] == "Unknown":
unknown_nums[row[0]] += 1
subs[row[0]][row[5]] += 1
total_nums[row[0]] += 1
if row[5] not in subs["all_genome"].keys():
if row[5] == "Unknown":
unknown_nums["all_genome"] += 1
subs["all_genome"][row[5]] = 1
total_nums["all_genome"] += 1
else:
if row[5] == "Unknown":
unknown_nums["all_genome"] += 1
subs["all_genome"][row[5]] += 1
total_nums["all_genome"] += 1
f_h.close()
return subs, total_nums, unknown_nums
def print_file_and_plot(sub, total_nums, unknown_nums,
strain, out_stat, prefix_name):
plot(sub, total_nums[strain], unknown_nums[strain], strain, prefix_name)
out_stat.write(strain + ":\n")
out_stat.write("Total including Unknown is {0}; "
"Total excluding Unknown is {1}\n".format(
total_nums[strain],
total_nums[strain] - unknown_nums[strain]))
for local, num in sub.items():
if local != "Unknown":
out_stat.write(
"\t{0}\t{1}(including Unknown {2}; "
"excluding Unknonwn {3})\n".format(
local, num, float(num) / float(total_nums[strain]),
float(num) / (float(total_nums[strain]) - float(
unknown_nums[strain]))))
else:
out_stat.write("\t{0}\t{1}(including Unknown {2})\n".format(
local, num, float(num) / float(total_nums[strain])))
def stat_sublocal(psortb_file, prefix_name, stat_file):
subs, total_nums, unknown_nums = read_table(psortb_file)
out_stat = open(stat_file, "w")
if len(subs) > 2:
print_file_and_plot(subs["all_genome"], total_nums, unknown_nums,
"all_genome", out_stat, prefix_name)
for strain, sub in subs.items():
if strain != "all_genome":
print_file_and_plot(sub, total_nums, unknown_nums, strain,
out_stat, prefix_name)
# end of annogesiclib/stat_sublocal.py
import csv
import shutil
from annogesiclib.helper import Helper
from annogesiclib.gff3 import Gff3Parser
def import_cds(gff):
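# prefer the Name attribute, then ID, and finally fall back to
# 'feature:start-end_strand' as the label of the overlapping feature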
if "Name" in gff.attributes.keys():
return gff.attributes["Name"]
elif "ID" in gff.attributes.keys():
return gff.attributes["ID"]
else:
name = "".join([gff.feature, ":", str(gff.start), "-", str(gff.end),
"_", gff.strand])
return name
def check_overlap(table_file, gff_file):
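# rewrite the sRNA table and append, for both strands, the overlapping features
# and the number (and percentage) of overlapping nucleotides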
out = open(table_file + "tmp", "w")
gffs = []
gff_f = open(gff_file, "r")
for entry in Gff3Parser().entries(gff_f):
if Helper().feature_without_notgene(entry):
gffs.append(entry)
fh = open(table_file, "r")
out.write("\t".join([
"Rank", "Genome", "Name", "Start", "End", "Strand",
"Start_with_TSS/Cleavage_site", "End_with_cleavage", "Candidates",
"Lib_type", "Best_avg_coverage", "Track/Coverage",
"Normalized_secondary_energy_change(by_length)", "sRNA_types",
"Conflict_sORF", "nr_hit_number", "sRNA_hit_number",
"nr_hit_top3|ID|e-value|score", "sRNA_hit|e-value|score", "Overlap_CDS_forward",
"Overlap_nts_forward", "Overlap_CDS_reverse",
"Overlap_nts_reverse","End_with_terminator",
"Associated_promoter", "sRNA_length"]) + "\n")
for row in csv.reader(fh, delimiter='\t'):
if row[3] != "Start":
overlaps = {"forward": [], "reverse": [],
"CDS_f": [], "CDS_r": []}
start = int(row[3])
end = int(row[4])
for gff in gffs:
if ((gff.end < end) and (
gff.end > start) and (
gff.start <= start)) or (
(gff.start > start) and (
gff.start < end) and (
gff.end >= end)) or (
(gff.end >= end) and (
gff.start <= start)) or (
(gff.end <= end) and (
gff.start >= start)):
overlap = min(gff.end, end) - max(gff.start, start) + 1
percent = "{0:.0f}%".format((float(overlap) / float(end - start + 1)) * 100)
if gff.strand == "+":
overlaps["forward"].append(str(overlap) + "(" + str(percent) + ")")
overlaps["CDS_f"].append(import_cds(gff))
else:
overlaps["reverse"].append(str(overlap) + "(" + str(percent) + ")")
overlaps["CDS_r"].append(import_cds(gff))
if len(overlaps["forward"]) == 0:
overlaps["forward"] = ["NA"]
overlaps["CDS_f"] = ["NA"]
if len(overlaps["reverse"]) == 0:
overlaps["reverse"] = ["NA"]
overlaps["CDS_r"] = ["NA"]
out.write("\t".join(row[0:19] + [";".join(overlaps["CDS_f"]), ";".join(overlaps["forward"]),
";".join(overlaps["CDS_r"]), ";".join(overlaps["reverse"])] +
row[21:]) + "\n")
shutil.move(table_file + "tmp", table_file) | ANNOgesic | /ANNOgesic-1.1.14.linux-x86_64.tar.gz/usr/local/lib/python3.10/dist-packages/annogesiclib/check_srna_overlap.py | check_srna_overlap.py |
import os
import sys
import numpy as np
from annogesiclib.lib_reader import read_wig, read_libs
from annogesiclib.coverage_detection import get_repmatch
def check_tex_conds(tracks, libs, texs, check_tex, conds, tex_notex):
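# count how many detected tracks support each condition; when TEX+/TEX-
# libraries are used, a library pair is only counted once it fulfils the
# tex_notex requirement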
for track in tracks:
for lib in libs:
if lib["name"] == track:
if "tex" in lib["type"]:
type_ = "tex"
else:
type_ = "frag"
index = "_".join([lib["cond"]])
if len(texs) != 0:
for key, num in texs.items():
if track in key:
if (texs[key] >= tex_notex) and (
key not in check_tex):
check_tex.append(key)
if index not in conds.keys():
conds[index] = 1
else:
conds[index] += 1
else:
if index not in conds.keys():
conds[index] = 1
else:
conds[index] += 1
def detect_hight_toler(cover, height, tmp_covers, tracks, lib_track):
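# coverages above the height cutoff update the best value and register the
# track for the replicate check; lower coverages only update the tolerance value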
if cover > height:
if tmp_covers["best"] < cover:
tmp_covers["best"] = cover
tracks.append(lib_track)
else:
if cover > tmp_covers["toler"]:
tmp_covers["toler"] = cover
def elongation(lib_conds, template_texs, libs, strand, trans,
args_tran, strain, tolers):
'''check coverage and replicate match to form transcript'''
first = True
pre_pos = -1
check_tex = []
tracks = []
conds = {}
pre_wig = None
detect = False
texs = template_texs.copy()
tmp_covers = {"best": 0, "toler": -1}
for cond, lib_tracks in lib_conds.items():
for lib_name, covers in lib_tracks.items():
index_pos = 0
for cover in covers:
for cond, lib_tracks in lib_conds.items():
for lib_track in lib_tracks.keys():
real_track = lib_track.split("|")[-3]
if index_pos < len(lib_tracks[lib_track]):
compare_cover = lib_tracks[lib_track][index_pos]
else:
compare_cover = 0
detect_hight_toler(
compare_cover, args_tran.height,
tmp_covers, tracks, real_track)
for track in tracks:
if len(texs) != 0:
for key, num in texs.items():
if track in key:
texs[key] += 1
check_tex_conds(tracks, libs, texs, check_tex,
conds, args_tran.tex)
for cond, detect_num in conds.items():
if ("tex" in cond):
tex_rep = get_repmatch(args_tran.replicates["tex"], cond)
if detect_num >= tex_rep:
detect = True
elif ("frag" in cond):
frag_rep = get_repmatch(args_tran.replicates["frag"], cond)
if detect_num >= frag_rep:
detect = True
if detect:
detect = False
trans[strain].append(tmp_covers["best"])
else:
trans[strain].append(-1)
if (tmp_covers["toler"] != -1):
tolers.append(tmp_covers["toler"])
else:
tolers.append(args_tran.height + 10)
tmp_covers = {"best": 0, "toler": -1}
tracks = []
conds = {}
check_tex = []
texs = template_texs.copy()
index_pos += 1
break
break
def transfer_to_tran(wigs, libs, template_texs, strand, args_tran):
'''check coverage and replicate match to form transcript'''
tolers = {}
trans = {}
detect = False
for strain, lib_conds in wigs.items():
if strain not in trans:
trans[strain] = []
tolers[strain] = []
elongation(lib_conds, template_texs, libs, strand, trans,
args_tran, strain, tolers[strain])
return tolers, trans
def print_transcript(finals, out):
for strain, datas in finals.items():
num = 0
datas = sorted(datas, key=lambda x: (
x["start"], x["end"], x["strand"]))
for data in datas:
name = '%0*d' % (5, num)
attribute = ";".join(["=".join(items) for items in ([
("ID", strain + "_transcript" + str(num)),
("Name", "transcript_" + name),
("high_coverage", str(data["high"])),
("low_coverage", str(data["low"])),
("detect_lib", data["wig"])])])
out.write("\t".join([str(field) for field in [
strain, "ANNOgesic", "transcript", str(data["start"]),
str(data["end"]), ".", data["strand"], ".",
attribute]]) + "\n")
num += 1
def fill_gap_and_print(trans, strand, finals, tolers, wig_type, args_tran):
'''merge the covered positions into transcripts, tolerating short gaps
(checked against the low_cutoff), and store the final transcripts'''
for strain, covers in trans.items():
if strain not in finals:
finals[strain] = []
first = True
start = -1
end = -1
pre_cover = None
cover_pos = 1
for cover in covers:
fit = True
if cover != -1:
if first:
first = False
start = cover_pos
high_cover = cover
low_cover = cover
else:
if (cover_pos - pre_pos) <= args_tran.tolerance:
if cover_pos - pre_pos > 1:
for toler_strain, toler_datas in tolers.items():
if toler_strain == strain:
toler_covers = toler_datas[
(pre_pos - 1): cover_pos]
for toler_cover in toler_covers:
if (toler_cover < args_tran.low_cutoff):
fit = False
break
if fit:
end = cover_pos
if high_cover < cover:
high_cover = cover
if low_cover > cover:
low_cover = cover
if ((cover_pos - pre_pos) >
args_tran.tolerance) or (not fit):
if (start != -1) and (end != -1) and (
(end - start) >= args_tran.width):
finals[strain].append({
"start": start, "end": end, "strand": strand,
"high": high_cover, "low": low_cover,
"wig": wig_type})
start = cover_pos
end = -1
high_cover = cover
low_cover = cover
pre_cover = cover
pre_pos = cover_pos
cover_pos += 1
if (len(covers) != 0) and (not first) and (
(start != -1) and (end != -1) and (
(end - start) >= args_tran.width)):
finals[strain].append({
"start": start, "end": end, "strand": strand,
"high": high_cover, "low": low_cover,
"wig": wig_type})
return finals
def detect_transcript(wig_f_file, wig_r_file, wig_folder, input_lib,
out_file, wig_type, args_tran):
out = open(out_file, "w")
out.write("##gff-version 3\n")
finals = {}
libs, texs = read_libs(input_lib, wig_folder)
wig_fs = read_wig(wig_f_file, "+", libs)
wig_rs = read_wig(wig_r_file, "-", libs)
tolers_f, tran_fs = transfer_to_tran(wig_fs, libs, texs, "+", args_tran)
tolers_r, tran_rs = transfer_to_tran(wig_rs, libs, texs, "-", args_tran)
fill_gap_and_print(tran_fs, "+", finals, tolers_f, wig_type, args_tran)
fill_gap_and_print(tran_rs, "-", finals, tolers_r, wig_type, args_tran)
print_transcript(finals, out)
out.close()
del wig_fs
del wig_rs
# end of annogesiclib/transcript_detection.py
import os
import csv
import shutil
from annogesiclib.helper import Helper
from annogesiclib.multiparser import Multiparser
from annogesiclib.gene_ontology import retrieve_uniprot, map2goslim
class GoTermFinding(object):
'''Retrieving the GO term'''
def __init__(self, args_go):
self.multiparser = Multiparser()
self.helper = Helper()
self.out_all = os.path.join(args_go.out_folder, "all_CDSs")
self.out_express = os.path.join(args_go.out_folder, "expressed_CDSs")
self.result_all_path = os.path.join(self.out_all, "GO_term_results")
self.result_express_path = os.path.join(self.out_express,
"GO_term_results")
self.gff_path = os.path.join(args_go.gffs, "tmp")
if args_go.trans is not None:
self.tran_path = os.path.join(args_go.trans, "tmp")
else:
self.tran_path = None
self.stat_all_path = os.path.join(self.out_all, "statistics")
self.stat_express_path = os.path.join(self.out_express,
"statistics")
self.all_strain = "all_genomes_uniprot.csv"
def _retrieve_go(self, uniprot, out_path, type_, log):
prefixs = []
log.write("Running gene_ontology.py to retrieve GO terms.\n")
for gff in os.listdir(self.gff_path):
prefix = gff.replace(".gff", "")
prefixs.append(prefix)
self.helper.check_make_folder(os.path.join(out_path, prefix))
out_file = os.path.join(out_path, prefix,
"_".join([prefix, "uniprot.csv"]))
print("Extracting GO terms of {0} from UniProt".format(prefix))
if self.tran_path is not None:
tran_file = os.path.join(self.tran_path,
"_".join([prefix, "transcript.gff"]))
if not os.path.exists(tran_file):
tran_file = None
else:
tran_file = None
retrieve_uniprot(uniprot, os.path.join(self.gff_path, gff),
out_file, tran_file, type_)
log.write("\t" + out_file + " is generated.\n")
def _remove_header(self, out_all):
out = open(out_all + "_tmp", "w")
fh = open(out_all, "r")
out.write("\t".join(["Genome", "Strand", "Start", "End",
"Protein_id", "Go_term"]) + "\n")
for row in csv.reader(fh, delimiter='\t'):
if row[0] != "Genome":
out.write("\t".join(row) + "\n")
out.close()
fh.close()
shutil.move(out_all + "_tmp", out_all)
def _merge_files(self, gffs, out_path, out_folder, log):
'''merge the files according to the input genome folder'''
folders = []
log.write("Merging the output files based on the input genome "
"information.\n")
for folder in os.listdir(gffs):
if folder.endswith("gff_folder"):
folder_prefix = folder.replace(".gff_folder", "")
folder_path = os.path.join(out_folder, folder_prefix)
self.helper.check_make_folder(folder_path)
folders.append(folder_path)
filenames = []
for gff in os.listdir(os.path.join(gffs, folder)):
if gff.endswith(".gff"):
filenames.append(gff.replace(".gff", ""))
out_all = os.path.join(folder_path, self.all_strain)
if len(filenames) > 1:
if self.all_strain in os.listdir(folder_path):
os.remove(out_all)
for filename in filenames:
csv_file = "_".join([filename, "uniprot.csv"])
self.helper.merge_file(os.path.join(out_path,
filename, csv_file), out_all)
self._remove_header(out_all)
shutil.copy(os.path.join(out_path, filename, csv_file),
folder_path)
else:
shutil.copyfile(os.path.join(out_path, filenames[0],
"_".join([filenames[0], "uniprot.csv"])),
out_all)
self.helper.remove_all_content(out_path, None, "dir")
self.helper.remove_all_content(out_path, None, "file")
for folder in folders:
folder_prefix = folder.split("/")[-1]
shutil.move(folder, os.path.join(out_path, folder_prefix))
for file_ in os.listdir(os.path.join(out_path, folder_prefix)):
log.write("\t" + os.path.join(out_path, folder_prefix, file_) +
" is generated.\n")
def _stat(self, out_path, stat_path, go, goslim, out_folder, log):
log.write("Running gene_ontology.py to Retrieve GOslim terms and "
"do statistics.\n")
log.write("The following files are generated:\n")
for folder in os.listdir(out_path):
strain_stat_path = os.path.join(stat_path, folder)
self.helper.check_make_folder(strain_stat_path)
fig_path = os.path.join(strain_stat_path, "figs")
if "fig" not in os.listdir(strain_stat_path):
os.mkdir(fig_path)
stat_file = os.path.join(strain_stat_path,
"_".join(["stat", folder + ".csv"]))
map2goslim(goslim, go,
os.path.join(out_path, folder, self.all_strain),
stat_file, out_folder)
log.write("\t" + stat_file + "\n")
self.helper.move_all_content(out_folder, fig_path,
["_three_roots.png"])
self.helper.move_all_content(out_folder, fig_path,
["_molecular_function.png"])
self.helper.move_all_content(out_folder, fig_path,
["_cellular_component.png"])
self.helper.move_all_content(out_folder, fig_path,
["_biological_process.png"])
for file_ in os.listdir(fig_path):
log.write("\t" + os.path.join(fig_path, file_) + "\n")
def run_go_term(self, args_go, log):
for gff in os.listdir(args_go.gffs):
if gff.endswith(".gff"):
self.helper.check_uni_attributes(os.path.join(
args_go.gffs, gff))
self.multiparser.parser_gff(args_go.gffs, None)
if args_go.trans is not None:
self.multiparser.parser_gff(args_go.trans, "transcript")
print("Computing all CDSs")
log.write("Retrieving GO terms for all CDSs.\n")
self._retrieve_go(args_go.uniprot, self.result_all_path, "all", log)
self._merge_files(args_go.gffs, self.result_all_path, self.out_all, log)
self._stat(self.result_all_path, self.stat_all_path, args_go.go,
args_go.goslim, self.out_all, log)
if args_go.trans is not None:
log.write("Retrieving GO terms only for expressed CDSs.\n")
print("Computing express CDSs")
self._retrieve_go(args_go.uniprot, self.result_express_path,
"express", log)
self._merge_files(args_go.gffs, self.result_express_path,
self.out_express, log)
self._stat(self.result_express_path, self.stat_express_path,
args_go.go, args_go.goslim, self.out_express, log)
self.helper.remove_tmp_dir(args_go.gffs)
if args_go.trans is not None:
self.helper.remove_tmp_dir(args_go.trans)
# end of annogesiclib/goterm.py
import os
import sys
import time
import shutil
import copy
from glob import glob
from subprocess import call, Popen
from annogesiclib.multiparser import Multiparser
from annogesiclib.helper import Helper
from annogesiclib.converter import Converter
from annogesiclib.circRNA_detection import detect_circrna
class CircRNADetection(object):
'''Detection of circRNA'''
def __init__(self, args_circ):
self.multiparser = Multiparser()
self.helper = Helper()
self.converter = Converter()
self.alignment_path = os.path.join(args_circ.output_folder,
"segemehl_alignment_files")
self.splice_path = os.path.join(args_circ.output_folder,
"segemehl_splice_results")
self.candidate_path = os.path.join(args_circ.output_folder,
"circRNA_tables")
self.gff_folder = os.path.join(args_circ.output_folder, "gffs")
self.gff_path = os.path.join(args_circ.gffs, "tmp")
self.splices = {"file": "splicesites.bed",
"splice": "splicesites"}
self.trans = {"file": "transrealigned.bed",
"trans": "transrealigned"}
self.fasta_path = os.path.join(args_circ.fastas, "tmp")
def _wait_process(self, processes):
'''wait for the parallel processes to finish'''
for p in processes:
p.wait()
if p.stdout:
p.stdout.close()
if p.stdin:
p.stdin.close()
if p.stderr:
p.stderr.close()
try:
p.kill()
except OSError:
pass
time.sleep(5)
def _deal_zip_file(self, read_files, log):
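# decompress .bz2/.gz read files with bzcat/zcat; a '.fa' suffix is appended
# when the decompressed name has no recognizable FASTA/FASTQ extension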
tmp_datas = []
tmp_reads = []
for reads in read_files:
zips = []
tmp_datas = reads["files"]
for read in reads["files"]:
if read.endswith(".bz2"):
mod_read = read.replace(".bz2", "")
if (".fa" not in mod_read) and (
".fasta" not in mod_read) and (
".fna" not in mod_read) and (
".fq" not in mod_read) and (
".fastq" not in mod_read):
mod_read = mod_read + ".fa"
read_out = open(mod_read, "w")
tmp_datas.append(mod_read)
zips.append(mod_read)
print(" ".join(["Uncompressing", read]))
log.write(" ".join(["bzcat", read]) + "\n")
call(["bzcat", read], stdout=read_out)
log.write("\t" + mod_read + " is generated.\n")
read_out.close()
elif read.endswith(".gz"):
mod_read = read.replace(".gz", "")
if (".fa" not in mod_read) and (
".fasta" not in mod_read) and (
".fna" not in mod_read) and (
".fq" not in mod_read) and (
".fastq" not in mod_read):
mod_read = mod_read + ".fa"
read_out = open(mod_read, "w")
tmp_datas.append(mod_read)
zips.append(mod_read)
print(" ".join(["Uncompressing", read]))
log.write(" ".join(["zcat", read]) + "\n")
call(["zcat", read], stdout=read_out)
read_out.close()
log.write("\t" + mod_read + " is generated.\n")
tmp_reads.append({"sample": reads["sample"],
"files": tmp_datas, "zips": zips})
return tmp_reads
def _run_segemehl_fasta_index(self, segemehl_path, fasta_path,
index, fasta, log):
log.write(" ".join([segemehl_path,
"-x", os.path.join(fasta_path, index),
"-d", os.path.join(fasta_path, fasta)]) + "\n")
call([segemehl_path,
"-x", os.path.join(fasta_path, index),
"-d", os.path.join(fasta_path, fasta)])
def _run_segemehl_align(self, args_circ, index, fasta, read,
sam_file, log_file, fasta_prefix, log):
out = open(os.path.join(self.alignment_path,
fasta_prefix, sam_file), "w")
# write segemehl's stderr to a per-read log file; keep the main
# ANNOgesic log handle ("log") for recording the command line
err = open(os.path.join(self.alignment_path,
fasta_prefix, log_file), "w")
log.write(" ".join([args_circ.segemehl_path,
"-i", os.path.join(self.fasta_path, index),
"-d", os.path.join(self.fasta_path, fasta),
"-q", read, "-S"]) + "\n")
p = Popen([args_circ.segemehl_path,
"-i", os.path.join(self.fasta_path, index),
"-d", os.path.join(self.fasta_path, fasta),
"-q", read, "-S"],
stdout=out, stderr=err)
return p
def _align(self, args_circ, read_datas, log):
'''Align the reads with segemehl. If BAM files are provided instead,
this step can be skipped.'''
prefixs = []
align_files = []
log.write("Using segemehl to align the read.\n")
log.write("Please make sure the version of segemehl is at least 0.1.9.\n")
for fasta in os.listdir(self.fasta_path):
index = fasta.replace(".fa", ".idx")
self._run_segemehl_fasta_index(args_circ.segemehl_path,
self.fasta_path, index, fasta, log)
processes = []
num_process = 0
fasta_prefix = fasta.replace(".fa", "")
prefixs.append(fasta_prefix)
self.helper.check_make_folder(os.path.join(
self.alignment_path, fasta_prefix))
log.write("Running for {0}.\n".format(fasta_prefix))
for reads in read_datas:
for read in reads["files"]:
num_process += 1
read_name = read.split("/")[-1]
if read_name.endswith(".fa") or \
read_name.endswith(".fna") or \
read_name.endswith(".fasta") or \
read_name.endswith(".fq") or \
read_name.endswith(".fastq"):
filename = read_name.split(".")
read_prefix = ".".join(filename[:-1])
sam_file = "_".join([read_prefix, fasta_prefix + ".sam"])
log_file = "_".join([read_prefix, fasta_prefix + ".log"])
align_files.append("_".join([read_prefix, fasta_prefix]))
print("Mapping {0}".format(sam_file))
p = self._run_segemehl_align(
args_circ, index, fasta, read,
sam_file, log_file, fasta_prefix, log)
processes.append(p)
if num_process == args_circ.cores:
self._wait_process(processes)
num_process = 0
self._wait_process(processes)
log.write("Done!\n")
log.write("The following files are generated in {0}:\n".format(
os.path.join(self.alignment_path, fasta_prefix)))
for file_ in os.listdir(os.path.join(
self.alignment_path, fasta_prefix)):
log.write("\t" + file_ + "\n")
return align_files, prefixs
def _run_samtools_convert_bam(self, samtools_path, pre_sam, out_bam, log):
log.write(" ".join([samtools_path, "view",
"-bS", pre_sam, "-o", out_bam]) + "\n")
call([samtools_path, "view", "-bS", pre_sam, "-o", out_bam])
def _convert_sam2bam(self, sub_alignment_path, samtools_path, align_files, log):
bam_files = []
convert_ones = []
remove_ones = []
log.write("Using Samtools to convert SAM files to BAM files.\n")
log.write("Please make sure the version of Samtools is at least 1.3.1.\n")
for sam in os.listdir(sub_alignment_path):
pre_sam = os.path.join(sub_alignment_path, sam)
if sam.endswith(".sam"):
bam_file = sam.replace(".sam", ".bam")
print("Converting {0} to {1}".format(sam, bam_file))
out_bam = os.path.join(sub_alignment_path, bam_file)
self._run_samtools_convert_bam(samtools_path, pre_sam,
out_bam, log)
bam_files.append(out_bam)
if align_files:
if bam_file.replace(".bam", "") not in align_files:
convert_ones.append(out_bam)
else:
remove_ones.append(pre_sam)
elif sam.endswith(".bam"):
if (pre_sam not in convert_ones) and (
pre_sam not in remove_ones):
bam_files.append(pre_sam)
elif sam.endswith(".log"):
os.remove(pre_sam)
log.write("Done!\n")
log.write("The following files are generated:\n")
for file_ in os.listdir(sub_alignment_path):
if file_.endswith(".bam"):
log.write("\t" + os.path.join(sub_alignment_path, file_) + "\n")
return bam_files, convert_ones, remove_ones
def _run_samtools_merge_sort(self, samtools_path, prefix,
out_folder, bam_datas, log):
log.write("Using Samtools for merging, sorting and converting "
"the BAM files.\n")
log.write("Make sure the version Samtools is at least 1.3.1.\n")
for bam_data in bam_datas:
print("Merging bam files for {0} of {1}".format(
prefix, bam_data["sample"]))
sample_bam = os.path.join(out_folder, "_".join([
prefix, bam_data["sample"] + ".bam"]))
if len(bam_data["files"]) <= 1:
shutil.copyfile(bam_data["files"][0], sample_bam)
else:
file_line = " ".join(bam_data["files"])
log.write(" ".join([samtools_path, "merge",
sample_bam, file_line]) + "\n")
os.system(" ".join([samtools_path, "merge",
sample_bam, file_line]))
print("Sorting bam files for {0} of {1}".format(
prefix, bam_data["sample"]))
sort_sample = os.path.join(out_folder,
"_".join([prefix, bam_data["sample"] + "_sort.bam"]))
log.write(" ".join([samtools_path, "sort",
"-o", sort_sample, sample_bam]) + "\n")
call([samtools_path, "sort", "-o", sort_sample, sample_bam])
os.remove(sample_bam)
print("Converting bam files to sam files for {0} of {1}".format(
prefix, bam_data["sample"]))
log.write(" ".join([samtools_path, "view", "-h", "-o",
sort_sample.replace(".bam", ".sam"), sort_sample]) + "\n")
call([samtools_path, "view", "-h", "-o",
sort_sample.replace(".bam", ".sam"), sort_sample])
log.write("Done!\n")
log.write("\t" + sort_sample.replace(".bam", ".sam") + " is generated.\n")
def _merge_sort_aligment_file(
self, bam_datas, read_datas, samtools_path,
out_folder, convert_ones, tmp_reads, remove_ones, prefix, log):
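# build the per-sample BAM lists in one of three ways: from aligned reads only,
# from a mixture of user-provided BAMs and aligned reads, or from user BAMs
# alone, then merge/sort/convert them with Samtools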
if bam_datas is None:
merge_bam_datas = []
for read_data in read_datas:
bam_files = []
for read in read_data["files"]:
if read.endswith(".gz") or read.endswith(".bz2"):
read = ".".join(
read.split("/")[-1].split(".")[:-1])
read_prefix = ".".join(
read.split("/")[-1].split(".")[:-1])
bam_files.append(os.path.join(
self.alignment_path, prefix,
"_".join([read_prefix, prefix + ".bam"])))
merge_bam_datas.append({"sample": read_data["sample"],
"files": bam_files})
elif (bam_datas is not None) and (read_datas is not None):
merge_bam_datas = copy.deepcopy(bam_datas)
for bam_data in merge_bam_datas:
for read_data in read_datas:
if bam_data["sample"] == read_data["sample"]:
for read in read_data["files"]:
read_prefix = ".".join(
read.split("/")[-1].split(".")[:-1])
bam = os.path.join(
self.alignment_path, prefix,
"_".join([read_prefix, prefix + ".bam"]))
if (bam not in bam_data["files"]):
bam_data["files"].append(bam)
else:
merge_bam_datas = copy.deepcopy(bam_datas)
self._run_samtools_merge_sort(samtools_path, prefix,
out_folder, merge_bam_datas, log)
for bam in convert_ones:
os.remove(bam)
for sam in remove_ones:
os.remove(sam)
def _run_testrealign(self, prefix, testrealign_path, out_folder, log):
log.write("Using Segemehl to detect circular RNAs.\n")
log.write("Please make sure the version of Segemehl is at least 0.1.9.\n")
log.write("Please make sure your testrealign.x exists. If it does not "
"exists, please reinstall your Segemehl via using make all.\n")
sub_splice_path = os.path.join(self.splice_path, prefix)
if not os.path.exists(sub_splice_path):
os.mkdir(sub_splice_path)
err_log = os.path.join(sub_splice_path, prefix + ".log")
print("Running testrealign.x for {0}".format(prefix))
for sam_file in os.listdir(out_folder):
if sam_file.endswith("sort.sam"):
sample_prefix = sam_file.replace("_sort.sam", "")
command = " ".join([
testrealign_path,
"-d", os.path.join(self.fasta_path, prefix + ".fa"),
"-q", os.path.join(out_folder, sam_file), "-n",
"-U", os.path.join(sub_splice_path,
sample_prefix + "_splicesites.bed"),
"-T", os.path.join(sub_splice_path,
sample_prefix + "_transrealigned.bed")])
log.write(command + " 2>" + err_log + "\n")
os.system(command + " 2>" + err_log)
log.write("Done!\n")
log.write("The following files are generated:\n")
for file_ in os.listdir(sub_splice_path):
log.write("\t" + os.path.join(sub_splice_path, file_) + "\n")
self.helper.remove_all_content(out_folder, ".sam", "file")
def _merge_bed(self, fastas, splice_path, output_folder):
'''Merge the bed files for analysis'''
fa_prefixs = []
for fasta in os.listdir(fastas):
headers = []
if (fasta.endswith(".fa") or fasta.endswith(".fna") or
fasta.endswith(".fasta")):
with open(os.path.join(fastas, fasta), "r") as f_h:
for line in f_h:
line = line.strip()
if line.startswith(">"):
headers.append(line[1:])
filename = fasta.split(".")
fasta_prefix = ".".join(filename[:-1])
fa_prefixs.append(fasta_prefix)
bed_folder = os.path.join(
output_folder, fasta_prefix)
self.helper.check_make_folder(bed_folder)
samples = []
for header in headers:
for splice in os.listdir(os.path.join(
splice_path, header)):
if splice.endswith(".bed"):
if self.splices["file"] in splice:
sample = splice.replace(header, "")
sample = sample.replace(
self.splices["file"], "")
if sample not in samples:
samples.append(sample)
shutil.copyfile(
os.path.join(
splice_path, header, splice),
os.path.join(
bed_folder, "tmp_" + splice))
for sample in samples:
out_splice = os.path.join(bed_folder, "".join([
fasta_prefix + sample + self.splices["file"]]))
out_trans = os.path.join(bed_folder, "".join([
fasta_prefix + sample + self.trans["file"]]))
if os.path.exists(out_splice):
os.remove(out_splice)
if os.path.exists(out_trans):
os.remove(out_trans)
for file_ in os.listdir(bed_folder):
if (self.splices["splice"] in file_) and (
sample in file_):
self.helper.merge_file(os.path.join(
bed_folder, file_), out_splice)
elif (self.trans["trans"] in file_) and (
sample in file_):
self.helper.merge_file(os.path.join(
bed_folder, file_), out_trans)
self.helper.remove_all_content(splice_path, None, "dir")
return samples, fa_prefixs
def _stat_and_gen_gff(self, prefixs, samples, args_circ, log):
'''do statistics and print the result to gff file'''
log.write("Running circRNA.py to do statistics and generate gff files.\n")
log.write("The following files are generated:\n")
for prefix in prefixs:
self.helper.check_make_folder(os.path.join(self.gff_folder,
prefix))
self.helper.check_make_folder(os.path.join(self.splice_path,
prefix))
for bed in os.listdir(os.path.join(
args_circ.output_folder, prefix)):
if (bed.split("_")[0] != "tmp") and (bed.endswith(".bed")):
shutil.copy(
os.path.join(args_circ.output_folder, prefix, bed),
os.path.join(self.splice_path, prefix))
self.helper.check_make_folder(os.path.join(
self.candidate_path, prefix))
print("Comparing circular RNAs with annotations of {0}".format(
prefix))
for sample in samples:
splice_file = os.path.join(
self.splice_path, prefix,
"".join([prefix, sample, self.splices["file"]]))
stat_file = os.path.join(args_circ.stat_folder,
"".join(["stat_", prefix, sample,
"circRNA.csv"]))
csv_all = os.path.join(self.candidate_path, prefix,
"".join([prefix, sample, "circRNA_all.csv"]))
csv_best = os.path.join(self.candidate_path, prefix,
"".join([prefix, sample, "circRNA_best.csv"]))
gff_all = os.path.join(self.gff_folder, prefix,
"".join([prefix, sample, "circRNA_all.gff"]))
gff_best = os.path.join(self.gff_folder, prefix,
"".join([prefix, sample, "circRNA_best.gff"]))
detect_circrna(splice_file, os.path.join(
self.gff_path, prefix + ".gff"), csv_all,
args_circ, stat_file)
self.converter.convert_circ2gff(
os.path.join(self.candidate_path, prefix,
"".join([prefix, sample, "circRNA_all.csv"])),
args_circ, gff_all, gff_best)
log.write("\t" + stat_file + "\n")
log.write("\t" + csv_all + "\n")
log.write("\t" + csv_best + "\n")
log.write("\t" + gff_all + "\n")
log.write("\t" + gff_best + "\n")
def _extract_input_files(self, inputs):
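# every --bam_files/--read_files entry is expected in the form
# 'sample_name:file1,file2,...'; it is split into a dict holding the sample
# name and the list of files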
input_datas = []
for input_ in inputs:
datas = input_.split(":")
if len(datas) != 2:
print("Error: the format of --bam_files or "
"--read_files is wrong!")
sys.exit()
for file_ in datas[-1].split(","):
if not os.path.exists(file_):
print("Error: some files in --bam_files or "
"--read_files do not exist!")
sys.exit()
input_datas.append({"sample": datas[0],
"files": datas[-1].split(",")})
return input_datas
def _combine_read_bam(self, bam_files, bam_datas, read_datas):
if bam_datas is not None:
for bam_data in bam_datas:
for read_data in read_datas:
if bam_data["sample"] == read_data["sample"]:
for read in read_data["files"]:
prefix = ".".join(
read.split("/")[-1].split(".")[:-1])
bam = os.path.join(self.alignment_path,
prefix + ".bam")
if (bam in bam_files) and (
bam not in bam_data["files"]):
bam_data["files"].append(bam)
else:
bam_datas = []
for read_data in read_datas:
bam_files = []
for read in read_data["files"]:
prefix = ".".join(
read.split("/")[-1].split(".")[:-1])
bam_files.append(os.path.join(
self.alignment_path, prefix + ".bam"))
bam_datas.append({"sample": read_data["sample"],
"files": bam_files})
return bam_datas
def _remove_tmp_files(self, args_circ, fa_prefixs):
self.helper.remove_tmp_dir(args_circ.fastas)
self.helper.remove_tmp_dir(args_circ.gffs)
self.helper.remove_all_content(args_circ.output_folder,
".bam", "file")
for prefix in fa_prefixs:
shutil.rmtree(os.path.join(args_circ.output_folder, prefix))
def run_circrna(self, args_circ, log):
'''detection of circRNA'''
bam_datas = None
read_datas = None
if (args_circ.bams is None) and (args_circ.read_files is None):
log.write("--bam_files and --read_files can not be both emtpy.\n")
print("Error: --bam_files or --read_files should be assigned.")
sys.exit()
if args_circ.bams is not None:
bam_datas = self._extract_input_files(args_circ.bams)
if args_circ.read_files is not None:
read_datas = self._extract_input_files(args_circ.read_files)
for gff in os.listdir(args_circ.gffs):
if gff.endswith(".gff"):
self.helper.check_uni_attributes(os.path.join(
args_circ.gffs, gff))
if args_circ.segemehl_path is None:
log.write("segemehl does not exists.\n")
print("Error: please assign segemehl path!!")
sys.exit()
self.multiparser.parser_fasta(args_circ.fastas)
self.multiparser.parser_gff(args_circ.gffs, None)
self.multiparser.combine_gff(args_circ.fastas, self.gff_path,
"fasta", None)
tmp_reads = []
if args_circ.read_files:
log.write("Raw read files are found.\n")
tmp_reads = self._deal_zip_file(read_datas, log)
align_files, prefixs = self._align(args_circ, tmp_reads, log)
else:
align_files = None
prefixs = []
for fasta in os.listdir(self.fasta_path):
if fasta.endswith(".fa"):
fasta_prefix = fasta.replace(".fa", "")
prefixs.append(fasta_prefix)
for prefix in prefixs:
if args_circ.read_files:
sub_alignment_path = os.path.join(self.alignment_path, prefix)
bam_files, convert_ones, remove_ones = self._convert_sam2bam(
sub_alignment_path, args_circ.samtools_path, align_files, log)
else:
convert_ones = []
remove_ones = []
self._merge_sort_aligment_file(
bam_datas, read_datas, args_circ.samtools_path,
args_circ.output_folder,
convert_ones, tmp_reads, remove_ones, prefix, log)
self._run_testrealign(prefix, args_circ.testrealign_path,
args_circ.output_folder, log)
samples, fa_prefixs = self._merge_bed(
args_circ.fastas, self.splice_path, args_circ.output_folder)
self._stat_and_gen_gff(fa_prefixs, samples, args_circ, log)
if len(tmp_reads) != 0:
for reads in tmp_reads:
for read in reads["zips"]:
os.remove(read)
self._remove_tmp_files(args_circ, fa_prefixs)
# end of annogesiclib/circrna.py
import shutil
import csv
def import_data(row):
return{"strain": row[1], "strand": row[2],
"associate": row[3], "start_seq": int(row[4]),
"end_seq": int(row[5]), "rfam": row[6], "e": row[7],
"score": row[8],
"start_align": int(row[9]), "end_align": int(row[10]),
"info": row[0:6], "ID": row[0]}
def modify_table(table, output_all):
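# remove duplicated hits from the riboswitch/RNA thermometer table: with
# output_all the raw lines are deduplicated, otherwise only one entry per ID
# is kept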
first = True
rbss = []
out = open("tmp.csv", "w")
out.write("#ID\tGenome\tStrand\tAssociated_CDS\tStart_genome\t"
"End_genome\tRfam\tE_value\tScore\tStart_align\tEnd_align\n")
if output_all:
with open(table) as fh:
for line in fh:
line = line.strip()
if first:
first = False
rbss.append(line)
out.write(line + "\n")
else:
if line not in rbss:
rbss.append(line)
out.write(line + "\n")
else:
fh = open(table, "r")
for row in csv.reader(fh, delimiter='\t'):
rbss.append(import_data(row))
ids = []
for rbs1 in rbss:
repeat = False
if "print" not in rbs1.keys():
rbs1["print"] = True
for rbs2 in rbss:
if (rbs1["strain"] == rbs2["strain"]) and \
(rbs1["strand"] == rbs2["strand"]) and \
(rbs1["ID"] == rbs2["ID"]):
if "print" not in rbs2.keys():
rbs2["print"] = True
repeat = True
if (not repeat) or (rbs1["ID"] not in ids):
ids.append(rbs1["ID"])
out.write("\t".join(rbs1["info"] + [rbs1["rfam"],
rbs1["e"], rbs1["score"],
str(rbs1["start_align"]),
str(rbs1["end_align"])]) + "\n")
fh.close()
out.close()
shutil.move("tmp.csv", table) | ANNOgesic | /ANNOgesic-1.1.14.linux-x86_64.tar.gz/usr/local/lib/python3.10/dist-packages/annogesiclib/modify_rbs_table.py | modify_rbs_table.py |
import os
import csv
import sys
import shutil
from subprocess import call
from annogesiclib.helper import Helper
from annogesiclib.multiparser import Multiparser
from annogesiclib.converter import Converter
from annogesiclib.merge_manual import merge_manual_predict_tss
from annogesiclib.stat_TSSpredator import stat_tsspredator
from annogesiclib.plot_TSS_venn import plot_venn
from annogesiclib.validate_gene import validate_gff
from annogesiclib.stat_TA_comparison import stat_ta_tss
from annogesiclib.check_orphan import check_orphan
from annogesiclib.filter_TSS_pro import filter_tss_pro
from annogesiclib.filter_low_expression import filter_low_expression
class TSSpredator(object):
def __init__(self, args_tss):
self.multiparser = Multiparser()
self.helper = Helper()
self.converter = Converter()
self.master = os.path.join(args_tss.out_folder, "MasterTables")
self.tmps = {"tss": "tmp_TSS", "ta_tss": "tmp_ta_tss", "tss_ta":
"tmp_tss", "tmp": "tmp"}
if args_tss.ta_files is not None:
self.tmps["ta"] = os.path.join(args_tss.ta_files, "tmp")
else:
self.tmps["ta"] = None
self.gff_path = os.path.join(args_tss.gffs, "tmp")
if args_tss.manual is not None:
self.manual_path = os.path.join(args_tss.manual, "tmp")
self.wig_path = os.path.join(args_tss.wig_folder, "tmp")
self.fasta_path = os.path.join(args_tss.fastas, "tmp")
self.stat_outfolder = os.path.join(args_tss.out_folder, "statistics")
self.gff_outfolder = os.path.join(args_tss.out_folder, "gffs")
def _assign_dict(self, lib_datas):
return {"wig": lib_datas[0],
"tex": lib_datas[1],
"condition": int(lib_datas[2]),
"replicate": lib_datas[3],
"strand": lib_datas[4]}
def _print_lib(self, lib_num, lib_list, out, wig_folder, prefix, rep_set):
for num_id in range(1, lib_num+1):
cond_list = []
for lib in lib_list:
if num_id == lib["condition"]:
cond_list.append(lib)
cond_sort_list = sorted(cond_list, key=lambda k: k['replicate'])
reps = []
for cond in cond_sort_list:
out.write("{0}_{1}{2} = {3}\n".format(
prefix, cond["condition"], cond["replicate"],
os.path.join(wig_folder, cond["wig"])))
reps.append(cond["replicate"])
for rep in sorted(rep_set):
if rep not in reps:
out.write("{0}_{1}{2} = \n".format(
prefix, cond["condition"], rep))
def _start_to_run(self, tsspredator_path, config_file, out_path, prefix, log):
print("Running TSSpredator for " + prefix)
log.write("Make sure the version of TSSpredator is at least 1.06.\n")
out = open(os.path.join(out_path, "log.txt"), "w")
err = open(os.path.join(out_path, "err.txt"), "w")
log.write(" ".join(["java", "-jar", tsspredator_path,
config_file]) + "\n")
call(["java", "-jar", tsspredator_path,
config_file], stdout=out, stderr=err)
out.close()
err.close()
log.write("Done!\n")
log.write("The following files are generated in {0}:\n".format(out_path))
for file_ in os.listdir(out_path):
log.write("\t" + file_ + "\n")
def _import_lib(self, libs, wig_folder, project_strain_name,
out, gff, program, fasta):
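# each library string is expected as 'wig_file:tex_or_notex:condition:replicate:strand';
# the libraries are sorted into the TEX+/TEX- forward/reverse groups for the config file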
lib_dict = {"fp": [], "fm": [], "nm": [], "np": []}
lib_num = 0
rep_set = set()
list_num_id = []
for lib in libs:
lib_datas = lib.split(":")
if not lib_datas[0].endswith(".wig"):
print("Error: Wiggle files are not end with .wig!")
sys.exit()
for wig in os.listdir(wig_folder):
filename = wig.split("_STRAIN_")
if (filename[0] == lib_datas[0][:-4]) and (
filename[1][:-4] == project_strain_name):
lib_datas[0] = wig
if int(lib_datas[2]) > lib_num:
lib_num = int(lib_datas[2])
if lib_datas[3] not in rep_set:
rep_set.add(lib_datas[3])
if (lib_datas[1] == "tex") and (lib_datas[4] == "+"):
lib_dict["fp"].append(self._assign_dict(lib_datas))
elif (lib_datas[1] == "tex") and (lib_datas[4] == "-"):
lib_dict["fm"].append(self._assign_dict(lib_datas))
elif (lib_datas[1] == "notex") and (lib_datas[4] == "+"):
lib_dict["np"].append(self._assign_dict(lib_datas))
elif (lib_datas[1] == "notex") and (lib_datas[4] == "-"):
lib_dict["nm"].append(self._assign_dict(lib_datas))
for num_id in range(1, lib_num+1):
os.system("echo '##gff-version 3' > tmp")
g = open(gff, "r")
for row in csv.reader(g, delimiter='\t'):
if not row[0].startswith("#"):
seq_name = row[0]
break
os.system("echo '##sequence-region '" + seq_name + " >> tmp")
os.system("cat " + gff + ">> tmp")
g.close()
shutil.move("tmp", gff)
out.write("annotation_{0} = {1}\n".format(num_id, gff))
if program.lower() == "tss":
self._print_lib(lib_num, lib_dict["fm"], out,
wig_folder, "fivePrimeMinus", rep_set)
self._print_lib(lib_num, lib_dict["fp"], out,
wig_folder, "fivePrimePlus", rep_set)
elif program.lower() == "ps":
self._print_lib(lib_num, lib_dict["nm"], out,
wig_folder, "fivePrimeMinus", rep_set)
self._print_lib(lib_num, lib_dict["np"], out,
wig_folder, "fivePrimePlus", rep_set)
else:
print("Error: Wrong program name! Please assing tss "
"or processing_site.")
sys.exit()
for num_id in range(1, lib_num+1):
out.write("genome_{0} = {1}\n".format(num_id, fasta))
for num_id in range(1, lib_num+1):
list_num_id.append(str(num_id))
return lib_num, num_id, rep_set, lib_dict, list_num_id
def _print_repmatch(self, args_tss, out):
'''check replicate match'''
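# --replicate_match values look like 'all_<num>' or '<lib>_<num>'; 'all' writes
# one global minNumRepMatches, otherwise the most frequent number becomes the
# global value and deviating libraries get their own entry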
detect_all = False
for rep in args_tss.repmatch:
if "all" in rep:
detect_all = True
match = rep.split("_")[-1]
out.write("minNumRepMatches = {0}\n".format(match))
break
if not detect_all:
nums = {}
matchs = {}
for match in args_tss.repmatch:
lib = match.split("_")[0]
rep = match.split("_")[-1]
matchs[lib] = rep
if rep not in nums.keys():
nums[rep] = 1
else:
nums[rep] += 1
for rep, num in nums.items():
if num == max(nums.values()):
out.write("minNumRepMatches = {0}\n".format(rep))
max_rep = rep
break
for lib, rep in matchs.items():
if rep != max_rep:
out.write("minNumRepMatches_{0} = {1}\n".format(
lib, rep))
def _extract_best_para(self, args_tss, prefix, log):
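# read best_<genome>.csv produced by optimize_tss_ps and extract the optimized
# parameters from the 'he_.._rh_.._fa_.._rf_.._bh_.._ef_.._pf_..' string of its
# last line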
detect = False
for best_file in os.listdir(args_tss.auto_load):
if best_file == "_".join(["best", prefix + ".csv"]):
bh = open(os.path.join(args_tss.auto_load, best_file), "r")
lines = bh.readlines()
bh.close()
if len(lines[len(lines)-1].split("\t")) < 8:
print("Error: some information in {0} is missing. "
"It may be due to that \"optimize_tss_ps\" did "
"not finish successfully.".format(best_file))
log.write("Error: some information in {0} is missing. "
"It may be due to that \"optimize_tss_ps\" did "
"not finish successfully.\n".format(best_file))
sys.exit()
else:
para_info = lines[len(lines)-1].split("\t")[1].split("_")
detect_all = all(elem in para_info
for elem in ["he", "rh", "fa", "rf",
"bh", "ef", "pf"])
if (not detect_all) or (len(para_info) != 14):
print("Error: {0} is complete. Some parameters are "
"missing!".format(best_file))
log.write("Error: {0} is complete. Some parameters "
"are missing!\n".format(best_file))
sys.exit()
else:
detect = True
height = para_info[para_info.index("he") + 1]
height_reduction = para_info[
para_info.index("rh") + 1]
factor = para_info[para_info.index("fa") + 1]
factor_reduction = para_info[
para_info.index("rf") + 1]
base_height = para_info[
para_info.index("bh") + 1]
enrichment_factor = para_info[
para_info.index("ef") + 1]
processing_factor = para_info[
para_info.index("pf") + 1]
if detect:
return height, height_reduction, factor, factor_reduction, \
base_height, enrichment_factor, processing_factor
else:
print("Error: No best_{0}.csv can be found in {1}! ".format(
prefix, args_tss.auto_load))
log.write("Error: No best_{0}.csv can be found in {1}\n".format(
prefix, args_tss.auto_load))
sys.exit()
def _get_input_para(self, args_tss, prefix, log):
if args_tss.genome_order is None:
height = args_tss.height[0]
height_reduction = args_tss.height_reduction[0]
factor = args_tss.factor[0]
factor_reduction = args_tss.factor_reduction[0]
base_height = args_tss.base_height[0]
enrichment_factor = args_tss.enrichment_factor[0]
processing_factor = args_tss.processing_factor[0]
else:
if prefix not in args_tss.genome_order:
print("Error: the parameters for {0} were not assigned!".format(
prefix))
log.write("Error: the parameters for {0} were not assigned!\n".format(
prefix))
sys.exit()
else:
index = args_tss.genome_order.index(prefix)
height = args_tss.height[index]
height_reduction = args_tss.height_reduction[index]
factor = args_tss.factor[index]
factor_reduction = args_tss.factor_reduction[index]
base_height = args_tss.base_height[index]
enrichment_factor = args_tss.enrichment_factor[index]
processing_factor = args_tss.processing_factor[index]
return height, height_reduction, factor, factor_reduction, \
base_height, enrichment_factor, processing_factor
def _gen_config(self, project_strain_name, args_tss, gff,
wig_folder, fasta, config_file, log):
'''generation of config files'''
log.write("Generating config files for TSSpredator.\n")
if args_tss.auto_load is not None:
height, height_reduction, factor, factor_reduction, \
base_height, enrichment_factor, processing_factor = \
self._extract_best_para(args_tss, project_strain_name, log)
else:
height, height_reduction, factor, factor_reduction, \
base_height, enrichment_factor, processing_factor = \
self._get_input_para(args_tss, project_strain_name, log)
master_folder = "MasterTable_" + project_strain_name
out_path = os.path.join(self.master, master_folder)
self.helper.check_make_folder(out_path)
out = open(config_file, "w")
out.write("TSSinClusterSelectionMethod = HIGHEST\n")
out.write("allowedCompareShift = 1\n")
out.write("allowedRepCompareShift = 1\n")
lib_num, num_id, rep_set, lib_dict, list_num_id = \
self._import_lib(args_tss.libs, wig_folder, project_strain_name,
out, gff, args_tss.program, fasta)
out.write("idList = ")
out.write(",".join(list_num_id) + "\n")
out.write("maxASutrLength = 100\n")
out.write("maxGapLengthInGene = 500\n")
out.write("maxNormalTo5primeFactor = {0}\n".format(
processing_factor))
out.write("maxTSSinClusterDistance = {0}\n".format(
args_tss.cluster + 1))
out.write("maxUTRlength = {0}\n".format(args_tss.utr_length))
out.write("min5primeToNormalFactor = {0}\n".format(
enrichment_factor))
out.write("minCliffFactor = {0}\n".format(factor))
out.write("minCliffFactorDiscount = {0}\n".format(
factor_reduction))
out.write("minCliffHeight = {0}\n".format(height))
out.write("minCliffHeightDiscount = {0}\n".format(
height_reduction))
out.write("minNormalHeight = {0}\n".format(base_height))
self._print_repmatch(args_tss, out)
out.write("minPlateauLength = 0\n")
out.write("mode = cond\n")
out.write("normPercentile = 0.9\n")
if args_tss.program.lower() == "tss":
self._print_lib(lib_num, lib_dict["nm"], out,
wig_folder, "normalMinus", rep_set)
self._print_lib(lib_num, lib_dict["np"], out,
wig_folder, "normalPlus", rep_set)
else:
self._print_lib(lib_num, lib_dict["fm"], out,
wig_folder, "normalMinus", rep_set)
self._print_lib(lib_num, lib_dict["fp"], out,
wig_folder, "normalPlus", rep_set)
out.write("numReplicates = {0}\n".format(len(rep_set)))
out.write("numberOfDatasets = {0}\n".format(lib_num))
out.write("outputDirectory = {0}\n".format(out_path))
for prefix_id in range(len(args_tss.output_prefixs)):
out.write("outputPrefix_{0} = {1}\n".format(
prefix_id + 1, args_tss.output_prefixs[prefix_id]))
out.write("outputID_{0} = {1}\n".format(
prefix_id + 1, args_tss.output_id))
out.write("projectName = {0}\n".format(project_strain_name))
out.write("projectName = {0}\n".format(project_strain_name))
out.write("superGraphCompatibility = igb\n")
out.write("texNormPercentile = 0.5\n")
out.write("writeGraphs = 0\n")
out.write("writeNocornacFiles = 0\n")
log.write("\t" + config_file + " is generated.\n")
out.close()
def _convert_gff(self, prefixs, args_tss, log):
for prefix in prefixs:
out_file = os.path.join(self.gff_outfolder, "_".join([
prefix, args_tss.program]) + ".gff")
gff_f = open(out_file, "w")
out_path = os.path.join(self.master, "_".join([
"MasterTable", prefix]))
if "MasterTable.tsv" not in os.listdir(out_path):
print("Error: There is not MasterTable file in {0} ".format(
out_path))
print("Please check configuration file.")
log.write("not MasterTable file is found in {0}\n".format(
out_path))
else:
if args_tss.program.lower() == "processing":
feature = "processing_site"
elif args_tss.program.lower() == "tss":
feature = "TSS"
self.converter.convert_mastertable2gff(
os.path.join(out_path, "MasterTable.tsv"),
"ANNOgesic", feature, prefix, out_file)
log.write("\t" + out_file + "is generated.\n")
gff_f.close()
def _merge_manual(self, tsss, args_tss):
'''if manually detected TSSs are provided, merge them with the TSSs
predicted by TSSpredator'''
self.helper.check_make_folder(os.path.join(os.getcwd(),
self.tmps["tss"]))
for tss in tsss:
for gff in os.listdir(args_tss.gffs):
if (gff[:-4] == tss) and (".gff" in gff):
break
filename = "_".join([tss, args_tss.program]) + ".gff"
predict = os.path.join(self.gff_outfolder, filename)
manual = os.path.join(self.manual_path, tss + ".gff")
fasta = os.path.join(self.fasta_path, tss + ".fa")
stat_file = "stat_compare_TSSpredator_manual_{0}.csv".format(tss)
if os.path.exists(manual):
print("Merging and classiflying manually-detected "
"TSSs for {0}".format(tss))
merge_manual_predict_tss(
predict, stat_file,
os.path.join(self.tmps["tss"], filename),
os.path.join(args_tss.gffs, gff), args_tss, manual, fasta)
if os.path.exists(stat_file):
shutil.move(stat_file, os.path.join(
args_tss.out_folder, "statistics", tss, stat_file))
self.helper.move_all_content(self.tmps["tss"],
self.gff_outfolder, [".gff"])
shutil.rmtree(self.tmps["tss"])
def _validate(self, tsss, args_tss, log):
'''validate TSS with genome annotation'''
print("Validating TSSs with genome annotations")
log.write("Running validate_gene.py to compare genome "
"annotations and TSSs/PSs.\n")
for tss in tsss:
for gff in os.listdir(args_tss.gffs):
if (gff[:-4] == tss) and (".gff" in gff):
break
stat_file = os.path.join(
self.stat_outfolder, tss,
"".join(["stat_gene_vali_", tss, ".csv"]))
out_cds_file = os.path.join(args_tss.out_folder, "tmp.gff")
if args_tss.program.lower() == "tss":
compare_file = os.path.join(self.gff_outfolder,
"_".join([tss, "TSS.gff"]))
elif args_tss.program.lower() == "processing":
compare_file = os.path.join(self.gff_outfolder,
"_".join([tss, "processing.gff"]))
validate_gff(compare_file, os.path.join(args_tss.gffs, gff),
stat_file, out_cds_file, args_tss.utr_length,
args_tss.program.lower())
log.write("\t" + stat_file + " is generated.\n")
shutil.move(out_cds_file, os.path.join(args_tss.gffs, gff))
def _compare_ta(self, tsss, args_tss, log):
'''compare TSS with transcript'''
detect = False
log.write("Running stat_TA_comparison to compare transcripts "
"and TSSs/PSs.\n")
print("Comparing transcripts and TSSs")
self.multiparser.parser_gff(args_tss.ta_files, "transcript")
self.multiparser.combine_gff(args_tss.gffs, self.tmps["ta"],
None, "transcript")
for tss in tsss:
stat_out = os.path.join(
self.stat_outfolder, tss, "".join([
"stat_compare_TSS_transcript_",
tss, ".csv"]))
for ta in os.listdir(self.tmps["ta"]):
filename = ta.split("_transcript")
if (filename[0] == tss) and (filename[1] == ".gff"):
detect = True
break
compare_file = os.path.join(self.gff_outfolder,
"_".join([tss, "TSS.gff"]))
if detect:
stat_ta_tss(os.path.join(self.tmps["ta"], ta), compare_file,
stat_out, self.tmps["ta_tss"],
self.tmps["tss_ta"], args_tss.fuzzy)
self.helper.sort_gff(self.tmps["tss_ta"], compare_file)
self.helper.sort_gff(self.tmps["ta_tss"],
os.path.join(args_tss.ta_files, ta))
os.remove(self.tmps["tss_ta"])
os.remove(self.tmps["ta_tss"])
detect = False
log.write("\t" + stat_out + " is generated.\n")
def _stat_tss(self, tsss, feature, log):
print("Running statistaics")
for tss in tsss:
compare_file = os.path.join(self.gff_outfolder,
"_".join([tss, feature]) + ".gff")
stat_tsspredator(
compare_file, feature,
os.path.join(self.stat_outfolder, tss, "_".join([
"stat", feature, "class", tss]) + ".csv"),
os.path.join(self.stat_outfolder, tss, "_".join([
"stat", feature, "libs", tss]) + ".csv"))
self.helper.move_all_content(os.getcwd(), os.path.join(
self.stat_outfolder, tss), ["_class", ".png"])
for file_ in os.listdir(self.stat_outfolder):
if file_.startswith("TSSstatistics_"):
shutil.move(
os.path.join(
self.stat_outfolder, file_),
os.path.join(
self.stat_outfolder, tss, file_))
plot_venn(compare_file, feature)
self.helper.move_all_content(os.getcwd(), os.path.join(
self.stat_outfolder, tss), ["_venn", ".png"])
log.write("The following files in {0} are generated:\n".format(
(os.path.join(self.stat_outfolder, tss))))
for file_ in os.listdir(os.path.join(
self.stat_outfolder, tss)):
log.write("\t" + file_ + "\n")
def _get_prefixs(self, args_tss):
prefixs = []
detect = False
for fasta in os.listdir(self.fasta_path):
run = False
for gff in os.listdir(self.gff_path):
if fasta[:-3] == gff[:-4]:
prefix = fasta[:-3]
for wig in os.listdir(self.wig_path):
filename = wig.split("_STRAIN_")
if filename[1][:-4] == prefix:
detect = True
break
if detect:
prefixs.append(prefix)
return prefixs
def _merge_wigs(self, wig_folder, prefix, libs):
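# concatenate all forward and reverse wig files of this genome into
# tmp/merge_forward.wig and tmp/merge_reverse.wig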
self.helper.check_make_folder(os.path.join(os.getcwd(),
self.tmps["tmp"]))
for wig_file in os.listdir(wig_folder):
for lib in libs:
info = lib.split(":")
if (info[0][:-4] in wig_file) and (info[-1] == "+") and (
prefix in wig_file) and (
os.path.isfile(os.path.join(wig_folder, wig_file))):
Helper().merge_file(
os.path.join(wig_folder, wig_file),
os.path.join("tmp", "merge_forward.wig"))
if (info[0][:-4] in wig_file) and (info[-1] == "-") and (
prefix in wig_file) and (
os.path.isfile(os.path.join(wig_folder, wig_file))):
Helper().merge_file(
os.path.join(wig_folder, wig_file),
os.path.join("tmp", "merge_reverse.wig"))
def _check_orphan(self, prefixs, wig_folder, args_tss):
        '''If the genome annotation has no locus tag, this can still be used to classify the TSSs'''
for prefix in prefixs:
self._merge_wigs(wig_folder, prefix, args_tss.libs)
tmp_tss = os.path.join(self.tmps["tmp"], "_".join([
prefix, args_tss.program + ".gff"]))
pre_tss = os.path.join(self.gff_outfolder, "_".join([
prefix, args_tss.program + ".gff"]))
check_orphan(pre_tss, os.path.join(
args_tss.gffs, prefix + ".gff"),
"tmp/merge_forward.wig", "tmp/merge_reverse.wig", tmp_tss)
shutil.move(tmp_tss, pre_tss)
shutil.rmtree("tmp")
def _remove_files(self, args_tss):
print("Remove temperary files and folders")
self.helper.remove_tmp_dir(args_tss.fastas)
self.helper.remove_tmp_dir(args_tss.gffs)
self.helper.remove_tmp_dir(args_tss.ta_files)
if "merge_forward.wig" in os.listdir(os.getcwd()):
os.remove("merge_forward.wig")
if "merge_reverse.wig" in os.listdir(os.getcwd()):
os.remove("merge_reverse.wig")
shutil.rmtree(args_tss.wig_folder)
if args_tss.manual is not None:
shutil.rmtree(args_tss.manual)
def _deal_with_overlap(self, out_folder, args_tss):
        '''Deal with the situation where a TSS and a
        processing site are at the same position'''
if not args_tss.overlap_feature:
pass
else:
print("Comparing TSSs and Processing sites")
if args_tss.program.lower() == "tss":
for tss in os.listdir(out_folder):
if tss.endswith("_TSS.gff"):
ref = self.helper.get_correct_file(
args_tss.overlap_gffs, "_processing.gff",
tss.replace("_TSS.gff", ""), None, None)
filter_tss_pro(os.path.join(out_folder, tss),
ref, args_tss.program,
args_tss.cluster)
elif args_tss.program.lower() == "processing":
for tss in os.listdir(out_folder):
if tss.endswith("_processing.gff"):
ref = self.helper.get_correct_file(
args_tss.overlap_gffs, "_TSS.gff",
tss.replace("_processing.gff", ""), None, None)
filter_tss_pro(os.path.join(out_folder, tss),
ref, args_tss.program,
args_tss.cluster)
def _remove_re_hash(self, out_folder, args_tss):
out = open("tmp_re", "w")
if args_tss.program.lower() == "tss":
for tss in os.listdir(out_folder):
if tss.endswith("_TSS.gff"):
hash_num = 0
with open(os.path.join(out_folder, tss)) as fh:
for line in fh:
line = line.strip()
if line.startswith("#"):
if hash_num == 0:
out.write(line + "\n")
hash_num += 1
else:
out.write(line + "\n")
elif args_tss.program.lower() == "processing":
for tss in os.listdir(out_folder):
if tss.endswith("_processing.gff"):
hash_num = 0
with open(os.path.join(out_folder, tss)) as fh:
for line in fh:
line = line.strip()
if line.startswith("#"):
if hash_num == 0:
out.write(line + "\n")
hash_num += 1
else:
out.write(line + "\n")
out.close()
shutil.move("tmp_re", os.path.join(out_folder, tss))
def _low_expression(self, args_tss, gff_folder):
        '''Deal with lowly expressed TSSs'''
prefix = None
self._merge_wigs(args_tss.wig_folder, "wig", args_tss.libs)
for gff in os.listdir(gff_folder):
if (args_tss.program.lower() == "tss") and (
gff.endswith("_TSS.gff")):
prefix = gff.replace("_TSS.gff", "")
elif (args_tss.program.lower() == "processing") and (
gff.endswith("_processing.gff")):
prefix = gff.replace("_processing.gff", "")
if prefix:
out = open(os.path.join(
self.stat_outfolder, prefix, "_".join([
"stat", prefix, "low_expression_cutoff.csv"])), "w")
out.write("\t".join(["Genome", "Cutoff_coverage"]) + "\n")
cutoff = filter_low_expression(
os.path.join(gff_folder, gff), args_tss,
"tmp/merge_forward.wig", "tmp/merge_reverse.wig",
"tmp/without_low_expression.gff")
out.write("\t".join([prefix, str(cutoff)]) + "\n")
os.remove(os.path.join(gff_folder, gff))
shutil.move("tmp/without_low_expression.gff",
os.path.join(gff_folder, gff))
prefix = None
out.close()
def _check_output_id(self, gff, output_id):
g = open(gff, "r")
for row in csv.reader(g, delimiter='\t'):
if len(row) != 0:
if (not row[0].startswith("#")):
tags = row[-1].split(";")
detect = False
for tag in tags:
if tag.startswith(output_id):
detect = True
if (not detect) and (row[2] == "gene"):
print("Warning: --output_id does not exist in "
"all genes of annotation gff files.")
def run_tsspredator(self, args_tss, log):
input_folder = os.path.join(args_tss.out_folder, "configs")
for gff in os.listdir(args_tss.gffs):
if gff.endswith(".gff"):
self.helper.check_uni_attributes(os.path.join(
args_tss.gffs, gff))
self._check_output_id(os.path.join(
args_tss.gffs, gff), args_tss.output_id)
self.helper.check_make_folder(self.gff_outfolder)
self.multiparser.parser_fasta(args_tss.fastas)
self.multiparser.parser_gff(args_tss.gffs, None)
self.multiparser.parser_wig(args_tss.wig_folder)
prefixs = self._get_prefixs(args_tss)
for prefix in prefixs:
config = os.path.join(input_folder,
"_".join(["config", prefix]) + ".ini")
self._gen_config(
prefix, args_tss,
os.path.join(self.gff_path, prefix + ".gff"), self.wig_path,
os.path.join(self.fasta_path, prefix + ".fa"), config, log)
out_path = os.path.join(
self.master, "_".join(["MasterTable", prefix]))
config_file = os.path.join(
input_folder, "_".join(["config", prefix]) + ".ini")
self._start_to_run(args_tss.tsspredator_path, config_file,
out_path, prefix, log)
if os.path.exists(os.path.join(out_path, "TSSstatistics.tsv")):
shutil.move(os.path.join(out_path, "TSSstatistics.tsv"),
os.path.join(
self.stat_outfolder,
"TSSstatistics_" + prefix + ".tsv"))
if args_tss.program.lower() == "ps":
args_tss.program = "processing"
self._convert_gff(prefixs, args_tss, log)
if args_tss.check_orphan:
print("checking the orphan TSSs")
log.write("Running check_orphan.py to re-check orphan TSSs.\n")
self._check_orphan(prefixs,
os.path.join(args_tss.wig_folder, "tmp"),
args_tss)
self.multiparser.combine_gff(args_tss.gffs, self.gff_outfolder,
None, args_tss.program)
datas = []
for gff in os.listdir(self.gff_outfolder):
if gff.endswith(".gff"):
gff_folder = gff.replace("".join(["_", args_tss.program,
".gff"]), "")
self.helper.check_make_folder(
os.path.join(self.stat_outfolder, gff_folder))
datas.append(gff_folder)
if args_tss.remove_low_expression is not None:
log.write("Running filter_low_expression.py to filter out "
"low expressed TSS/PS.\n")
self._low_expression(args_tss, self.gff_outfolder)
if args_tss.manual is not None:
self.multiparser.parser_gff(args_tss.manual, None)
self.multiparser.combine_gff(args_tss.gffs, self.manual_path,
None, None)
self.multiparser.combine_fasta(args_tss.gffs, self.fasta_path,
None)
self.multiparser.combine_wig(args_tss.gffs, self.wig_path,
None, args_tss.libs)
log.write("Running merge_manual.py to merge the manual TSSs.\n")
self._merge_manual(datas, args_tss)
log.write("Running filter_TSS_pro.py to deal with the overlap "
"position between TSS and PS.\n")
self._remove_re_hash(self.gff_outfolder, args_tss)
self._deal_with_overlap(self.gff_outfolder, args_tss)
log.write("Running stat_TSSpredator.py to do statistics.\n")
self._stat_tss(datas, args_tss.program, log)
if args_tss.validate:
self._validate(datas, args_tss, log)
if args_tss.ta_files is not None:
self._compare_ta(datas, args_tss, log)
self._remove_files(args_tss) | ANNOgesic | /ANNOgesic-1.1.14.linux-x86_64.tar.gz/usr/local/lib/python3.10/dist-packages/annogesiclib/tsspredator.py | tsspredator.py |
<h1 align = "center">:rocket: ANN :facepunch:</h1>
---
# Install
`pip install fast-ann`
# Usage
```python
from ann import ANN
import numpy as np
data = np.random.random((1000, 128)).astype('float32')
ann = ANN()
ann.train(data, index_factory='IVF4000, Flat', noramlize=True)
dis, idx = ann.search(data[:10])
print(dis)
print(idx)
```
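
Continuing the example above, the trained index can also be written to disk and reloaded later with the wrapper's `write_index`/`read_index` methods (a minimal sketch; the file name is a placeholder):

```python
# Persist the trained index (GPU indexes are moved back to the CPU before saving)
ann.write_index('demo.index')

# Reload it into a fresh wrapper and query as before
ann2 = ANN()
ann2.index = ann2.read_index('demo.index')
dis, idx = ann2.search(data[:10])
```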
---
# Milvus image
```
yum -y install python3
# ln -sf /usr/bin/python3 /usr/bin/python
# ln -sf /usr/bin/pip3 /usr/bin/pip
pip3 install -U --no-cache-dir -i https://mirror.baidu.com/pypi/simple pip meutils pymilvus
rm -rf /tmp/*
rm -rf /root/.cache/pip*
```
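
Once a Milvus server is running, the wrapper in `annzoo/ann.py` can create a collection and inspect it. This is a sketch adapted from the module's own `__main__`; the import path, host and port are placeholders to adjust to your installation:

```python
from annzoo.ann import ANN  # import path assumed; adjust to your install

ann = ANN(host='127.0.0.1', port='19530')  # placeholder host/port

fields = [
    {"name": "scalar", "type": 'INT32', "params": {}, "indexes": [{}]},
    {"name": "vector", "type": 'FLOAT_VECTOR', "params": {"dim": 256},
     "indexes": [{"index_type": 'IVF_FLAT', 'metric_type': 'IP',
                  'params': {'nlist': 1024}, 'index_file_size': 1024}]},
]
ann.create_collection('demo', fields)
print(ann.demo.collection_info)
```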
---
- Benchmark faiss training time and memory usage at different data scales
- Test compression (quantization) options
- Four query combinations: by default, querying with vectors returns distance and index
  - id => id/vector
  - vector => id/vector
  - the push scenario needs docid => title_vector => docid
- Online serving
- id2word
- id2vector | ANNZOO | /ANNZOO-2021.2.5.19.21.41.tar.gz/ANNZOO-2021.2.5.19.21.41/README.md | README.md |
from meutils.pipe import *
from milvus import Milvus, DataType
# from milvus.client.exceptions import CollectionNotExistException
"""
client.drop_index
client.get_config
client.list_id_in_segment
client.load_collection???
"""
class ANN(object):
def __init__(self, host='10.46.221.49', port='19530', show_info=False):
        self.client = Milvus(host, port)  # thread pool
if show_info:
logger.info(
{
"ClientVersion": self.client.client_version(),
"ServerVersion": self.client.server_version()
}
)
def __getattr__(self, collection_name):
return Collection(collection_name, self.client)
def create_collection(self, collection_name, fields, auto_id=True, segment_row_limit=4096):
"""
:param collection_name:
:param fields: # type: BOOL INT32 INT64 FLOAT BINARY_VECTOR FLOAT_VECTOR
fields = [
{
"name": "scalar",
"type": 'INT32',
"params": {},
"indexes": [{}]
},
{
"name": "vector",
"type": 'FLOAT_VECTOR',
"params": {"dim": 768},
"indexes": [{"index_type": 'IVF_FLAT', 'metric_type': 'IP', 'params': {'nlist': 1024}, 'index_file_size': 1024}]
}
]
        # unclear where index_file_size actually takes effect
:param auto_id:
:param segment_row_limit: range 4096 ~ 4194304
:return:
"""
if self.client.has_collection(collection_name):
logger.warning(f"{collection_name} already exists! to drop.")
self.client.drop_collection(collection_name)
vec_field = [_ for _ in fields if _.get('type', '').__contains__('VECTOR')][0]
        # assert len(vec_fields) > 0, "at least one vector field is required"
for _ in fields:
if 'type' in _:
_['type'] = DataType.__getattr__(_['type'])
collection_param = {
"fields": fields,
"auto_id": auto_id,
"segment_row_limit": segment_row_limit,
}
# collection vector index
self.client.create_collection(collection_name, fields=collection_param)
self.client.create_index(collection_name, vec_field['name'], vec_field['indexes'][0])
logger.info(f"{self.client.get_collection_info(collection_name)}")
@property
def collection_names(self):
return self.client.list_collections()
def __create_index(self, collection_name, field_name, index_type='IVF_FLAT', metric_type='IP', index_params=None):
if index_params is None:
index_params = {'nlist': 1024}
params = {
'index_type': index_type,
            # 'index_file_size': 1024,  # TODO: unclear where this takes effect
'params': index_params,
'metric_type': metric_type,
}
self.client.create_index(collection_name, field_name, params) # field_name='embedding'
class Collection(object):
def __init__(self, name=None, client=None):
self.name = name
self.client = client
self.count_entities = self.count
self.count_documents = self.count
self.vector_name = self.get_vec_field_name()
def __str__(self):
has_collection = self.client.has_collection(self.name)
if not has_collection:
logger.warning(f"{self.name} doesn't exist")
return f"Collection({self.name})"
def batch_insert(self, df_entity: pd.DataFrame, batch_size=100000):
"""
        :param df_entity: DataFrame whose columns (e.g. id, vec, part) match the collection fields
:param batch_size:
:return:
"""
entity_names = [_['name'] for _ in self.collection_info['fields']]
logger.warning(f"EntityNames: {entity_names}")
        # split the data into batches
df_entity = df_entity.reset_index(drop=True)
n = len(df_entity)
num_part = n // batch_size + 1 if n % batch_size else n // batch_size
ids = []
for i in tqdm(range(num_part), desc='BatchInsert'):
df = df_entity.iloc[i * batch_size:(i + 1) * batch_size, :]
entities = []
for record in self.collection_info['fields']:
entities.append({
'name': record['name'],
'type': record['type'],
'values': df[record['name']].values
})
ids += self.client.insert(self.name, entities, ids=None) # todo: ids
time.sleep(1)
return ids
    # online serving: query the collection
def search(self, vectors=np.random.random((1, 256)), topk=10, nprobe=1,
scalar_list: List[dict] = None):
q = self.get_search_query(vectors, topk, nprobe, scalar_list)
entities = self.client.search(self.name, q)[0]
return entities
# entities = ann.client.search("demo", query_hybrid)[0]
# id2score = dict(zip(entities.ids, entities.distances))
#
# docs = mongo_collection.find({"xindaoid": {'$in': entities.ids}})
# df = pd.DataFrame(list(docs)).drop(['_id', 'category_', 'vector'], 1)
# df['distance'] = df['xindaoid'].map(id2score)
def get_entity_by_id(self, ids, fields=None):
return self.client.get_entity_by_id(self.name, ids, fields)
def delete_entity_by_id(self, ids):
self.client.delete_entity_by_id(self.name, ids)
@property
def count(self):
return self.client.count_entities(self.name)
@property
def collection_info(self):
return self.client.get_collection_info(self.name)
@property
def collection_stats(self):
return self.client.get_collection_stats(self.name)
def get_vec_field_name(self):
fields = self.collection_info['fields']
vec_field = [_ for _ in fields if str(_.get('type', '')).__contains__('VECTOR')][0]
return vec_field['name']
def get_search_query(self, vectors, topk=10, nprobe=1, scalar_list: List[dict] = None):
q = {
"bool": {
"must": [
{
"vector": {
self.vector_name: {
"topk": topk,
"query": vectors,
"metric_type": "IP",
"params": {
"nprobe": nprobe
}
}
}
},
]
}
}
        if scalar_list is not None:  # e.g. {"term": {"scalar_field": [1, 2, 3]}}
for _ in scalar_list:
q['bool']['must'].append(_)
return q
if __name__ == '__main__':
ann = ANN(show_info=True)
fields = [
{
"name": "scalar",
"type": 'INT32',
"params": {},
"indexes": [{}]
},
{
"name": "vector",
"type": 'FLOAT_VECTOR',
"params": {"dim": 256},
"indexes": [
{"index_type": 'IVF_FLAT', 'metric_type': 'IP', 'params': {'nlist': 1024}, 'index_file_size': 1024}]
}
]
ann.create_collection('demo', fields)
print(ann.demo)
print(ann.demo.collection_info)
    print(ann.demo.vector_name)
# print(ann.demo.collection_stats)
# entities = [
# {"name": "vec", "type": DataType.FLOAT_VECTOR, "values": vecs},
# {"name": "part", "type": DataType.INT32, "values": [i] * len(df)},
# ] | ANNZOO | /ANNZOO-2021.2.5.19.21.41.tar.gz/ANNZOO-2021.2.5.19.21.41/annzoo/ann.py | ann.py |
import faiss
import numpy as np
class ANN(object):
"""Flat支持: https://github.com/Jie-Yuan/faiss_note/blob/master/4.Faiss%20indexes%20%E8%BF%9B%E9%98%B6%E6%93%8D%E4%BD%9C.ipynb
恢复原数据
从index中移除向量
搜索距离范围内的向量
拆分/合并index
cpu_index.make_direct_map()
cpu_index.reconstruct(0)
ann.index.reconstruct
ann.index.reconstruct_c
ann.index.reconstruct_n
ann.index.reconstruct_n_c
ann.index.search_and_reconstruct
ann.index.search_and_reconstruct_c
数据集的大小
在高效检索的index中,聚类是其中的基础操作,数据量的大小主要影响聚类过程。
如果小于1M, 使用"...,IVFx,..."
N是数据集中向量个数,x一般取值[4sqrt(N),16sqrt(N)],需要30x ~ 256x个向量的数据集去训练。
如果在1M-10M,使用"...,IMI2x10,..."
使用k-means将训练集聚类为2^10个类,但是执行过程是在数据集的两半部分独立执行,即聚类中心有2^(2*10)个。
如果在10M-100M,使用"...,IMI2x12,..."
一个簇至少39个样本:一般2的N次方,我们一般用2**14或者更小一点 >> topk
经验公式 2 ** (np.log2(n) // 2 + 2)
"""
def __init__(self):
self.nogpu_index_factory = {'HNSW', 'SQ'}
def train(self, data, index_factory='Flat', metric=None, noramlize=False):
"""
:param data:
:param index_factory:
https://www.cnblogs.com/houkai/p/9316172.html
https://blog.csdn.net/xiaoxu2050/article/details/84982478
https://github.com/facebookresearch/faiss/wiki/Faiss-indexes
https://github.com/liqima/faiss_note/blob/master/4.Faiss%20indexes%20IO%E5%92%8Cindex%20factory.ipynb
:param metric:
faiss.METRIC_BrayCurtis
faiss.METRIC_Canberra
            faiss.METRIC_INNER_PRODUCT: inner product after L2 normalization ≈ cosine similarity
faiss.METRIC_JensenShannon
faiss.METRIC_L1
faiss.METRIC_L2
faiss.METRIC_Linf
faiss.METRIC_Lp
:return:
"""
if noramlize:
data = self.noramlize(data)
        assert data.dtype == 'float32', "data must be float32, e.g. np.asarray(data).astype('float32')"
dim = data.shape[1]
args = [dim, index_factory, metric] if metric else [dim, index_factory]
self.index = faiss.index_factory(*args)
if faiss.get_num_gpus() > 0:
            if any(i in index_factory for i in self.nogpu_index_factory):
                # these index types are not supported on GPU, keep the CPU index
                print(f"GPU not supported for: {index_factory}")
else:
# gpu_index_flat = faiss.index_cpu_to_gpu(res, 0, index_flat)
self.index = faiss.index_cpu_to_all_gpus(self.index)
print(f"Train ...")
self.index.train(data)
self.index.add(data)
print(f"Ntotal: {self.index.ntotal}")
def search(self, data, topK=10, nprobe=1, k_factor=1):
"""
:param data:
:param topK:
        :param nprobe: nprobe trades off search speed against result accuracy.
:param k_factor:
:return:
"""
        self.index.k_factor = k_factor  # the search first retrieves k_factor*topK candidates, then re-ranks them
self.index.nprobe = nprobe # default nprobe is 1, try a few more
return self.index.search(data, topK)
def write_index(self, file_name="index_file.index"):
# faiss.index_cpu_to_all_gpus(index)
if faiss.get_num_gpus() > 0:
index = faiss.index_gpu_to_cpu(self.index)
faiss.write_index(index, str(file_name))
else:
faiss.write_index(self.index, str(file_name))
def read_index(self, file_name="index_file.index"):
index = faiss.read_index(str(file_name))
# if faiss.get_num_gpus() > 0:
# index = faiss.index_cpu_to_gpu()
        # index_new = faiss.clone_index(index) # copy the index
return index
def noramlize(self, x):
if len(x.shape) > 1:
return x / np.clip(x ** 2, 1e-12, None).sum(axis=1).reshape((-1, 1) + x.shape[2:]) ** 0.5
else:
return x / np.clip(x ** 2, 1e-12, None).sum() ** 0.5 | ANNZOO | /ANNZOO-2021.2.5.19.21.41.tar.gz/ANNZOO-2021.2.5.19.21.41/annzoo/faiss.py | faiss.py |
# ANNarchy
[](https://zenodo.org/badge/latestdoi/57382690)
ANNarchy (Artificial Neural Networks architect) is a parallel and hybrid simulator for distributed rate-coded or spiking neural networks. The core of the library is written in C++ and parallelized using OpenMP or CUDA. It provides a Python interface for defining the networks. It is released under the [GNU GPL v2 or later](http://www.gnu.org/licenses/gpl.html).
The source code is available at:
<https://github.com/ANNarchy/ANNarchy>
The documentation is available online at:
<https://annarchy.github.io/>
A discussion forum is available at:
<https://groups.google.com/forum/#!forum/annarchy>
Bug reports should be done through the [Issue Tracker](https://github.com/ANNarchy/ANNarchy/issues) of ANNarchy on Github.
### Citation
If you use ANNarchy for your research, we would appreciate if you cite the following paper:
> Vitay J, Dinkelbach HÜ and Hamker FH (2015). ANNarchy: a code generation approach to neural simulations on parallel hardware. *Frontiers in Neuroinformatics* 9:19. [doi:10.3389/fninf.2015.00019](http://dx.doi.org/10.3389/fninf.2015.00019)
### Authors
* Julien Vitay (julien.vitay@informatik.tu-chemnitz.de).
* Helge Ülo Dinkelbach (helge-uelo.dinkelbach@informatik.tu-chemnitz.de).
* Fred Hamker (fred.hamker@informatik.tu-chemnitz.de).
## Installation
Using pip, you can install the latest stable release:
```
pip install ANNarchy
```
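
A minimal rate-coded network looks like the following (an illustrative sketch only; see the documentation linked above for the full API):

```python
from ANNarchy import *

# A simple leaky integrator rate-coded neuron
LeakyNeuron = Neuron(
    parameters="tau = 10.0",
    equations="tau * dr/dt + r = sum(exc) : min=0.0"
)

# Two populations connected all-to-all with random weights
pop1 = Population(geometry=100, neuron=LeakyNeuron)
pop2 = Population(geometry=100, neuron=LeakyNeuron)
proj = Projection(pre=pop1, post=pop2, target='exc')
proj.connect_all_to_all(weights=Uniform(0.0, 1.0))

compile()
simulate(100.0)  # duration in ms
```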
## Platforms
* GNU/Linux
* MacOS X
## Dependencies
* g++ >= 6.1 ( >= 7.4 recommended ) or clang++ >= 3.4
* python >= 3.7 with development files
* cython >= 0.20
* setuptools >= 40.0
* numpy >= 1.13
* sympy >= 1.6
* scipy >= 0.19
Recommended:
* matplotlib
* lxml
* PyQtGraph
* pandoc
* tensorboardX
| ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/README.md | README.md |
from __future__ import print_function
from ANNarchy import *
import random
from time import time
# Parameters
nb_neuron = 4 # Number of exc and inh neurons
size = (32, 32) # input size
freq = 1.2 # nb_cycles/half-image
nb_stim = 40 # Number of grating per epoch
nb_epochs = 20 # Number of epochs
max_freq = 28. # Max frequency of the poisson neurons
T = 10000. # Period for averaging the firing rate
# Izhikevich Coba neuron with AMPA, NMDA and GABA receptors
RSNeuron = Neuron(
parameters = """
a = 0.02 : population
b = 0.2 : population
c = -65. : population
d = 8. : population
tau_ampa = 5. : population
tau_nmda = 150. : population
tau_gabaa = 6. : population
tau_gabab = 150. : population
vrev_ampa = 0.0 : population
vrev_nmda = 0.0 : population
vrev_gabaa = -70.0 : population
vrev_gabab = -90.0 : population
""" ,
equations="""
# Inputs
I = g_ampa * (vrev_ampa - v) + g_nmda * nmda(v, -80.0, 60.0) * (vrev_nmda -v) + g_gabaa * (vrev_gabaa - v) + g_gabab * (vrev_gabab -v)
# Midpoint scheme
dv/dt = (0.04 * v + 5.0) * v + 140.0 - u + I : init=-65., min=-90., midpoint
du/dt = a * (b*v - u) : init=-13., midpoint
# Conductances
tau_ampa * dg_ampa/dt = -g_ampa : exponential
tau_nmda * dg_nmda/dt = -g_nmda : exponential
tau_gabaa * dg_gabaa/dt = -g_gabaa : exponential
tau_gabab * dg_gabab/dt = -g_gabab : exponential
""" ,
spike = """
v >= 30.
""",
reset = """
v = c
u += d
g_ampa = 0.0
g_nmda = 0.0
g_gabaa = 0.0
g_gabab = 0.0
""",
functions = """
nmda(v, t, s) = ((v-t)/(s))^2 / (1.0 + ((v-t)/(s))^2)
""",
refractory=1.0
)
# STDP with homeostatic regulation
homeo_stdp = Synapse(
parameters="""
# STDP
tau_plus = 60. : projection
tau_minus = 90. : projection
A_plus = 0.000045 : projection
A_minus = 0.00003 : projection
# Homeostatic regulation
alpha = 0.1 : projection
beta = 50.0 : projection # <- Difference with the original implementation
gamma = 50.0 : projection
Rtarget = 10. : projection
T = 10000. : projection
""",
equations = """
# Homeostatic values
R = post.r : postsynaptic
K = R/(T*(1.+fabs(1. - R/Rtarget) * gamma)) : postsynaptic
# Nearest-neighbour
stdp = if t_post >= t_pre: ltp else: - ltd
w += (alpha * w * (1- R/Rtarget) + beta * stdp ) * K : min=0.0, max=10.0
# Traces
tau_plus * dltp/dt = -ltp : exponential
tau_minus * dltd/dt = -ltd : exponential
""",
pre_spike="""
g_target += w
ltp = A_plus
""",
post_spike="""
ltd = A_minus
"""
)
# Input population
OnPoiss = PoissonPopulation(size, rates=1.0)
OffPoiss = PoissonPopulation(size, rates=1.0)
# RS neuron for the input buffers
OnBuffer = Population(size, RSNeuron)
OffBuffer = Population(size, RSNeuron)
# Connect the buffers
OnPoissBuffer = Projection(OnPoiss, OnBuffer, ['ampa', 'nmda'])
OnPoissBuffer.connect_one_to_one(Uniform(0.2, 0.6))
OffPoissBuffer = Projection(OffPoiss, OffBuffer, ['ampa', 'nmda'])
OffPoissBuffer.connect_one_to_one(Uniform(0.2, 0.6))
# Excitatory and inhibitory neurons
Exc = Population(nb_neuron, RSNeuron)
Inh = Population(nb_neuron, RSNeuron)
Exc.compute_firing_rate(T)
Inh.compute_firing_rate(T)
# Input connections
OnBufferExc = Projection(OnBuffer, Exc, ['ampa', 'nmda'], homeo_stdp)
OnBufferExc.connect_all_to_all(Uniform(0.004, 0.015))
OffBufferExc = Projection(OffBuffer, Exc, ['ampa', 'nmda'], homeo_stdp)
OffBufferExc.connect_all_to_all(Uniform(0.004, 0.015))
# Competition
ExcInh = Projection(Exc, Inh, ['ampa', 'nmda'], homeo_stdp)
ExcInh.connect_all_to_all(Uniform(0.116, 0.403))
ExcInh.Rtarget = 75.
ExcInh.tau_plus = 51.
ExcInh.tau_minus = 78.
ExcInh.A_plus = -0.000041
ExcInh.A_minus = -0.000015
InhExc = Projection(Inh, Exc, ['gabaa', 'gabab'])
InhExc.connect_all_to_all(Uniform(0.065, 0.259))
compile()
# Inputs
def get_grating(theta):
x = np.linspace(-1., 1., size[0])
y = np.linspace(-1., 1., size[1])
xx, yy = np.meshgrid(x, y)
z = np.sin(2.*np.pi*(np.cos(theta)*xx + np.sin(theta)*yy)*freq)
return np.maximum(z, 0.), -np.minimum(z, 0.0)
# Initial weights
w_on_start = OnBufferExc.w
w_off_start = OffBufferExc.w
# Monitors
m = Monitor(Exc, 'r')
n = Monitor(Inh, 'r')
o = Monitor(OnBufferExc[0], 'w', period=1000.)
p = Monitor(ExcInh[0], 'w', period=1000.)
# Learning procedure
tstart = time()
stim_order = list(range(nb_stim))
try:
for epoch in range(nb_epochs):
random.shuffle(stim_order)
for stim in stim_order:
# Generate a grating randomly
rates_on, rates_off = get_grating(np.pi*stim/float(nb_stim))
# Set it as input to the poisson neurons
OnPoiss.rates = max_freq * rates_on
OffPoiss.rates = max_freq * rates_off
# Simulate for 2s
simulate(2000.)
# Relax the Poisson inputs
OnPoiss.rates = 1.
OffPoiss.rates = 1.
# Simulate for 500ms
simulate(500.)
print('Epoch', epoch+1, 'done.')
except KeyboardInterrupt:
print('Simulation stopped')
print('Done in ', time()-tstart)
# Recordings
datae = m.get('r')
datai = n.get('r')
dataw = o.get('w')
datal = p.get('w')
# Final weights
w_on_end = OnBufferExc.w
w_off_end = OffBufferExc.w
# Save recordings
np.savez("weights.npz", ff_on=w_on_end, ff_off=w_off_end, ff_on_time=dataw, inh_time=datal)
# Plot
import matplotlib.pyplot as plt
plt.figure()
plt.title('Feedforward weights before and after learning')
for i in range(nb_neuron):
plt.subplot(3, nb_neuron, i+1)
plt.imshow((np.array(w_on_start[i])).reshape((32,32)), aspect='auto', cmap='hot')
plt.subplot(3, nb_neuron, nb_neuron + i +1)
plt.imshow((np.array(w_on_end[i])).reshape((32,32)), aspect='auto', cmap='hot')
plt.subplot(3, nb_neuron, 2*nb_neuron + i +1)
plt.imshow((np.array(w_off_end[i])).reshape((32,32)), aspect='auto', cmap='hot')
plt.figure()
plt.plot(datae[:, 0], label='Exc')
plt.plot(datai[:, 0], label='Inh')
plt.title('Mean FR of the Exc and Inh neurons')
plt.legend()
plt.figure()
plt.subplot(121)
plt.imshow(dataw.T, aspect='auto', cmap='hot')
plt.title('Timecourse of feedforward weights')
plt.colorbar()
plt.subplot(122)
plt.imshow(datal.T, aspect='auto', cmap='hot')
plt.title('Timecourse of inhibitory weights')
plt.colorbar()
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/homeostatic_stdp/SORF.py | SORF.py |
from __future__ import print_function
from ANNarchy import *
# Izhikevich RS neuron
RSNeuron = Neuron(
parameters = """
a = 0.02 : population
b = 0.2 : population
c = -65. : population
d = 8. : population
tau_ampa = 5. : population
tau_nmda = 150. : population
vrev = 0.0 : population
""" ,
equations="""
# Inputs
I = g_ampa * (vrev - v) + g_nmda * nmda(v, -80.0, 60.0) * (vrev -v)
# Midpoint scheme
dv/dt = (0.04 * v + 5.0) * v + 140.0 - u + I : init=-65., midpoint
du/dt = a * (b*v - u) : init=-13., midpoint
# Izhikevich scheme
# new_v = v + 0.5*(0.04 * v^2 + 5.0 * v + 140.0 - u + I) : init=-65.
# v = new_v + 0.5*(0.04 * new_v^2 + 5.0 * new_v + 140.0 - u + I) : init=-65.
# u += a * (b*v - u) : init=-13.
# Conductances
tau_ampa * dg_ampa/dt = -g_ampa : exponential
tau_nmda * dg_nmda/dt = -g_nmda : exponential
""" ,
spike = """
v >= 30.
""",
reset = """
v = c
u += d
""",
functions = """
nmda(v, t, s) = ((v-t)/(s))^2 / (1.0 + ((v-t)/(s))^2)
"""
)
# Input population
inp = PoissonPopulation(100, rates=np.linspace(0.2, 20., 100))
# RS neuron without homeostatic mechanism
pop1 = Population(1, RSNeuron)
pop1.compute_firing_rate(5000.)
# RS neuron with homeostatic mechanism
pop2 = Population(1, RSNeuron)
pop2.compute_firing_rate(5000.)
# Nearest Neighbour STDP
nearest_neighbour_stdp = Synapse(
parameters="""
tau_plus = 20. : projection
tau_minus = 60. : projection
A_plus = 0.0002 : projection
A_minus = 0.000066 : projection
w_max = 0.03 : projection
""",
equations = """
# Traces
tau_plus * dltp/dt = -ltp : exponential
tau_minus * dltd/dt = -ltd : exponential
# Nearest-neighbour
w += if t_post >= t_pre: ltp else: - ltd : min=0.0, max=w_max
""",
pre_spike="""
g_target += w
ltp = A_plus
""",
post_spike="""
ltd = A_minus
"""
)
# STDP with homeostatic regulation
homeo_stdp = Synapse(
parameters="""
# STDP
tau_plus = 20. : projection
tau_minus = 60. : projection
A_plus = 0.0002 : projection
A_minus = 0.000066 : projection
w_min = 0.0 : projection
w_max = 0.03 : projection
# Homeostatic regulation
alpha = 0.1 : projection
beta = 1.0 : projection
gamma = 50. : projection
Rtarget = 35. : projection
T = 5000. : projection
""",
equations = """
# Traces
tau_plus * dltp/dt = -ltp : exponential
tau_minus * dltd/dt = -ltd : exponential
# Homeostatic values
R = post.r : postsynaptic
K = R/(T*(1.+fabs(1. - R/Rtarget) * gamma)) : postsynaptic
# Nearest-neighbour
stdp = if t_post >= t_pre: ltp else: - ltd
w += (alpha * w * (1- R/Rtarget) + beta * stdp ) * K : min=w_min, max=w_max
""",
pre_spike="""
g_target += w
ltp = A_plus
""",
post_spike="""
ltd = A_minus
"""
)
# Projection without homeostatic mechanism
proj1 = Projection(inp, pop1, ['ampa', 'nmda'], synapse=nearest_neighbour_stdp)
proj1.connect_all_to_all(Uniform(0.01, 0.03))
# Projection with homeostatic mechanism
proj2 = Projection(inp, pop2, ['ampa', 'nmda'], synapse=homeo_stdp)
proj2.connect_all_to_all(weights=Uniform(0.01, 0.03))
compile()
# Record
m1 = Monitor(pop1, 'r')
m2 = Monitor(pop2, 'r')
m3 = Monitor(proj1[0], 'w', period=1000.)
m4 = Monitor(proj2[0], 'w', period=1000.)
# Simulate
T = 1000 # 1000s
simulate(T*1000., True)
# Get the data
data1 = m1.get('r')
data2 = m2.get('r')
data3 = m3.get('w')
data4 = m4.get('w')
print('Mean Firing Rate without homeostasis:', np.mean(data1[:, 0]))
print('Mean Firing Rate with homeostasis:', np.mean(data2[:, 0]))
import matplotlib.pyplot as plt
plt.subplot(311)
plt.plot(np.linspace(0, T, len(data1[:, 0])), data1[:, 0], 'r-', label="Without homeostasis")
plt.plot(np.linspace(0, T, len(data2[:, 0])), data2[:, 0], 'b-', label="With homeostasis")
plt.xlabel('Time (s)')
plt.ylabel('Firing rate (Hz)')
plt.subplot(312)
plt.plot(data3[-1, :], 'r-')
plt.plot(data4[-1, :], 'bx')
axes = plt.gca()
axes.set_ylim([0., 0.035])
plt.xlabel('# neuron')
plt.ylabel('Weights after 1000s')
plt.subplot(313)
plt.imshow(data4.T, aspect='auto', cmap='hot')
plt.xlabel('Time (s)')
plt.ylabel('# neuron')
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/homeostatic_stdp/Ramp.py | Ramp.py |
from ANNarchy import *
setup(paradigm="cuda", sparse_matrix_format="dense")
# Input neuron: r is set externally
InputNeuron = Neuron(parameters="r = 0.0")
# Leaky neuron
LeakyNeuron = Neuron(
parameters="""
tau = 10.0 : population
""",
equations="""
tau * dr/dt + r = sum(exc) - sum(inh) : min=0.0
"""
)
# Oja synapse
Oja = Synapse(
parameters="""
tau = 2000.0 : postsynaptic
alpha = 8.0 : postsynaptic
min_w = 0.0 : postsynaptic
""",
equations="""
tau * dw/dt = pre.r * post.r - alpha * post.r^2 * w : min=min_w
"""
)
# Creating the populations
Input = Population(geometry=(8, 8), neuron=InputNeuron)
Feature = Population(geometry=(8, 4), neuron=LeakyNeuron)
# Creating the projections
ff = Projection(
pre=Input,
post=Feature,
target='exc',
synapse = Oja
)
ff.connect_all_to_all(weights = Uniform(-0.5, 0.5))
ff.min_w = -10.0
lat = Projection(
pre=Feature,
post=Feature,
target='inh',
synapse = Oja
)
lat.connect_all_to_all(weights = Uniform(0.0, 1.0))
lat.alpha = 0.3
# every 200 trials we update
# the receptive fields
period = 200
count = 0
# Definition of the environment
def trial():
global count
count+=1
# Reset the firing rate for all neurons
Input.r = 0.0
# Clamp horizontal bars randomly
for h in range(Input.geometry[0]):
if np.random.random() < 1.0/ float(Input.geometry[0]):
Input[h, :].r = 1.0
# Clamp vertical bars randomly
for w in range(Input.geometry[1]):
if np.random.random() < 1.0/ float(Input.geometry[1]):
Input[:, w].r = 1.0
# Simulate for 50ms
simulate(50.)
# Return firing rates and receptive fields
if count < period:
return Input.r, Feature.r, None
else:
count = 0
return Input.r, Feature.r, ff.receptive_fields()
if __name__=='__main__':
compile()
# Create and launch the GUI
from Viz import Viewer
view = Viewer(func=trial)
view.run() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/bar_learning/BarLearningGPU.py | BarLearningGPU.py |
from ANNarchy import *
dt = 0.02
setup(dt=dt)
HH = Neuron(
parameters = """
C = 1.0 # Capacitance
VL = -59.387 # Leak voltage
VK = -82.0 # Potassium reversal voltage
    VNa = 45.0 # Sodium reversal voltage
gK = 36.0 # Maximal Potassium conductance
gNa = 120.0 # Maximal Sodium conductance
gL = 0.3 # Leak conductance
vt = 30.0 # Threshold for spike emission
I = 0.0 # External current
""",
equations = """
# Previous membrane potential
prev_V = V
# Voltage-dependency parameters
an = 0.01 * (V + 60.0) / (1.0 - exp(-0.1* (V + 60.0) ) )
am = 0.1 * (V + 45.0) / (1.0 - exp (- 0.1 * ( V + 45.0 )))
ah = 0.07 * exp(- 0.05 * ( V + 70.0 ))
bn = 0.125 * exp (- 0.0125 * (V + 70.0))
bm = 4.0 * exp (- (V + 70.0) / 80.0)
bh = 1.0/(1.0 + exp (- 0.1 * ( V + 40.0 )) )
# Alpha/Beta functions
dn/dt = an * (1.0 - n) - bn * n : init = 0.3, midpoint
dm/dt = am * (1.0 - m) - bm * m : init = 0.0, midpoint
dh/dt = ah * (1.0 - h) - bh * h : init = 0.6, midpoint
# Membrane equation
C * dV/dt = gL * (VL - V ) + gK * n**4 * (VK - V) + gNa * m**3 * h * (VNa - V) + I : midpoint
""",
spike = """
# Spike is emitted when the membrane potential crosses the threshold from below
(V > vt) and (prev_V <= vt)
""",
reset = """
# Nothing to do, it is built-in...
"""
)
pop = Population(neuron=HH, geometry=1)
pop.V = -50.0
compile()
m = Monitor(pop, ['spike', 'V', 'n', 'm', 'h'])
# Preparation
simulate(100.0)
# Current impulse for 1 ms
pop.I = 200.0
simulate(1.0)
# Reset
pop.I = 0.0
simulate(100.0)
data = m.get()
tstart = int(90.0/dt)
tstop = int(120.0/dt)
import matplotlib.pyplot as plt
plt.subplot(2,2,1)
plt.plot(90.0 + dt*np.arange(tstop-tstart), data['V'][tstart:tstop, 0])
plt.title('V')
plt.subplot(2,2,2)
plt.plot(90.0 + dt*np.arange(tstop-tstart), data['n'][tstart:tstop, 0])
plt.title('n')
plt.ylim((0.0, 1.0))
plt.subplot(2,2,3)
plt.plot(90.0 + dt*np.arange(tstop-tstart), data['m'][tstart:tstop, 0])
plt.title('m')
plt.ylim((0.0, 1.0))
plt.subplot(2,2,4)
plt.plot(90.0 + dt*np.arange(tstop-tstart), data['h'][tstart:tstop, 0])
plt.title('h')
plt.ylim((0.0, 1.0))
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/hodgkin_huxley/HodgkinHuxley.py | HodgkinHuxley.py |
from ANNarchy import *
dt=0.25
setup(dt=dt)
# Leaky integrator neuron
LIF = Neuron(
parameters = """
tau = 30.0 : population
I = 15.0
tau_I = 3.0 : population
""",
equations = """
tau * dv/dt = -v + g_exc - g_inh + I : init=13.5
tau_I * dg_exc/dt = -g_exc
tau_I * dg_inh/dt = -g_inh
""",
spike = "v > 15.0",
reset = "v = 13.5",
refractory = 3.0
)
# Short-term plasticity synapse
STP = Synapse(
parameters = """
w=0.0
tau_rec = 1.0
tau_facil = 1.0
U = 0.1
""",
equations = """
dx/dt = (1 - x)/tau_rec : init = 1.0, event-driven
du/dt = (U - u)/tau_facil : init = 0.1, event-driven
""",
pre_spike="""
g_target += w * u * x
x *= (1 - u)
u += U * (1 - u)
"""
)
# Create populations
P = Population(geometry=500, neuron=LIF)
P.I = np.sort(Uniform(14.625, 15.375).get_values(500))
P.v = Uniform(0.0, 15.0)
Exc = P[:400]
Inh = P[400:]
# Parameters for the synapses
Aee = 1.8
Aei = 5.4
Aie = 7.2
Aii = 7.2
Uee = 0.5
Uei = 0.5
Uie = 0.04
Uii = 0.04
tau_rec_ee = 800.0
tau_rec_ei = 800.0
tau_rec_ie = 100.0
tau_rec_ii = 100.0
tau_facil_ie = 1000.0
tau_facil_ii = 1000.0
# Create projections
proj_ee = Projection(pre=Exc, post=Exc, target='exc', synapse=STP)
proj_ee.connect_fixed_probability(probability=0.1, weights=Normal(Aee, (Aee/2.0), min=0.2*Aee, max=2.0*Aee))
proj_ee.U = Normal(Uee, (Uee/2.0), min=0.1, max=0.9)
proj_ee.tau_rec = Normal(tau_rec_ee, (tau_rec_ee/2.0), min=5.0)
proj_ee.tau_facil = dt # Cannot be 0!
proj_ei = Projection(pre=Inh, post=Exc, target='inh', synapse=STP)
proj_ei.connect_fixed_probability(probability=0.1, weights=Normal(Aei, (Aei/2.0), min=0.2*Aei, max=2.0*Aei))
proj_ei.U = Normal(Uei, (Uei/2.0), min=0.1, max=0.9)
proj_ei.tau_rec = Normal(tau_rec_ei, (tau_rec_ei/2.0), min=5.0)
proj_ei.tau_facil = dt # Cannot be 0!
proj_ie = Projection(pre=Exc, post=Inh, target='exc', synapse=STP)
proj_ie.connect_fixed_probability(probability=0.1, weights=Normal(Aie, (Aie/2.0), min=0.2*Aie, max=2.0*Aie))
proj_ie.U = Normal(Uie, (Uie/2.0), min=0.001, max=0.07)
proj_ie.tau_rec = Normal(tau_rec_ie, (tau_rec_ie/2.0), min=5.0)
proj_ie.tau_facil = Normal(tau_facil_ie, (tau_facil_ie/2.0), min=5.0)
proj_ii = Projection(pre=Inh, post=Inh, target='inh', synapse=STP)
proj_ii.connect_fixed_probability(probability=0.1, weights=Normal(Aii, (Aii/2.0), min=0.2*Aii, max=2.0*Aii))
proj_ii.U = Normal(Uii, (Uii/2.0), min=0.001, max=0.07)
proj_ii.tau_rec = Normal(tau_rec_ii, (tau_rec_ii/2.0), min=5.0)
proj_ii.tau_facil = Normal(tau_facil_ii, (tau_facil_ii/2.0), min=5.0)
compile()
# Record
Me = Monitor(Exc, 'spike')
Mi = Monitor(Inh, 'spike')
# Simulate
duration = 10000.0
simulate(duration, measure_time=True)
# Retrieve recordings
data_exc = Me.get()
data_inh = Mi.get()
te, ne = Me.raster_plot(data_exc['spike'])
ti, ni = Mi.raster_plot(data_inh['spike'])
# Histogram of the exc population
h = Me.histogram(data_exc['spike'], bins=1.0)
# Mean firing rate of each excitatory neuron
rates = []
for neur in data_exc['spike'].keys():
rates.append(len(data_exc['spike'][neur])/duration*1000.0)
# Plot
import matplotlib.pyplot as plt
plt.subplot(3,1,1)
plt.plot(te, ne, 'b.', markersize=1.0)
plt.plot(ti, ni, 'b.', markersize=1.0)
plt.xlim((0, duration)); plt.ylim((0,500))
plt.xlabel('Time (ms)')
plt.ylabel('# neuron')
plt.subplot(3,1,2)
plt.plot(h/400.)
plt.xlabel('Time (ms)')
plt.ylabel('Net activity')
plt.subplot(3,1,3)
plt.plot(sorted(rates))
plt.ylabel('Spikes / sec')
plt.xlabel('# neuron')
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/tsodyks_markram/TsodyksMarkram.py | TsodyksMarkram.py |
from ANNarchy import *
from ANNarchy.extensions.image import *
from ANNarchy.extensions.convolution import Convolution, Pooling
# Definition of the neurons
Linear = Neuron(equations="r=sum(exc): min=0.0")
DNF = Neuron(parameters="tau=10.0", equations="tau*dr/dt + r = sum(exc) + sum(inh): min=0.0, max=1.0")
# Population getting the video stream
width = 640
height = 480
video = VideoPopulation(geometry=(height, width, 3))
# Define a normalized red filter with dimensions 10*10*3
extent = 10
red_filter = [[ [2.0/extent**2, -1.0/extent**2, -1.0/extent**2] for j in range(extent) ] for i in range(extent)]
# Create a population of DNF neurons downscaling the image with a factor 10
dnf = Population(geometry=(height//extent, width//extent), neuron = DNF)  # integer division so the geometry is an integer tuple
# Create the convolution using the red filter
ff = Convolution(pre=video, post=dnf, target='exc').connect_filter(weights=red_filter)
# Create difference of Gaussians lateral connections for denoising/competition
lat = Projection(pre=dnf, post=dnf, target='inh').connect_dog(amp_pos = 0.15, sigma_pos = 0.05, amp_neg = 0.1, sigma_neg = 0.5, limit=0.1)
# Compile
compile()
# Start the camera
video.start_camera(0)
# Visualize the images using PyQtGraph
try:
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
except:
print('PyQtGraph is not installed, can not visualize the network.')
exit(0)
# Wrapping class
class Viewer(object):
" Class to visualize the network activity using PyQtGraph."
def __init__(self, video, result):
self.video = video
self.result = result
app = pg.mkQApp()
self.win = pg.GraphicsWindow(title="Live webcam")
self.win.resize(640,480)
box = self.win.addViewBox(lockAspect=True)
box.invertY()
self.vis = pg.ImageItem()
box.addItem(self.vis)
box = self.win.addViewBox(lockAspect=True)
box.invertY()
self.res = pg.ImageItem()
box.addItem(self.res)
self.win.show()
self.lastUpdate = pg.ptime.time()
self.avgFps = 0.0
def update(self):
# Set the input
self.video.grab_image()
# Simulate for 10 ms with a new input
simulate(5.0)
# Refresh the GUI
self.vis.setImage(np.swapaxes(self.video.r,0,1))
self.res.setImage(np.swapaxes(self.result.r,0,1))
# Listen to mouse/keyboard events
QtGui.QApplication.processEvents()
# FPS
now = pg.ptime.time()
fps = 1.0 / (now - self.lastUpdate)
self.lastUpdate = now
self.avgFps = self.avgFps * 0.8 + fps * 0.2
print(self.avgFps)
def run(self):
timer = QtCore.QTimer()
timer.timeout.connect(self.update)
timer.start(0)
QtGui.QApplication.instance().exec_()
timer.stop()
# Start the GUI
view = Viewer(video, dnf)
view.run() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/image/Webcam.py | Webcam.py |
# # Image Processing
# This simple example in `examples/image` demonstrates how to load images directly into the firing rates of a population and apply basic linear filters on it.
#
# It relies on the ANNarchy extensions `image` and `convolution` which must be explicitly imported:
from ANNarchy import *
from ANNarchy.extensions.image import *
from ANNarchy.extensions.convolution import Convolution, Pooling
clear()
# `ANNarchy.extensions.image` depends on the Python bindings of OpenCV, they must be installed before running the script.
#
# We first create an `ImagePopulation` that will load images:
image = ImagePopulation(geometry=(480, 640, 3))
# Its geometry specifies the size of the images that can be loaded, here 640x480 RGB images. Note the geometry must be of the form (height, width, channels), where channels is 1 for grayscale images and 3 for color images.
#
# The next step is to reduce the size of the image, what can be done by using the `Pooling` class of the `convolution` extension.
#
# We define a dummy artificial neuron, whose firing rate `r` will simply be the sum of excitatory connections (ensured to be positive, although this should always be the case). We then create a smaller population `pooled` with this neuron type, and connect it to the `ImagePopulation` using mean-pooling:
# Simple ANN
LinearNeuron = Neuron(equations="r=sum(exc): min=0.0")
# Subsampling population
pooled = Population(geometry=(48, 64, 3), neuron = LinearNeuron)
# Mean-pooling projection
pool_proj = Pooling(pre=image, post=pooled, target='exc', operation='mean')
pool_proj.connect_pooling()
# The `pooled` population reduces the size of the image by a factor ten (defined by the size of the population) by averaging the pixels values over 10x10 regions (`operation` is set to `'mean'`, but one could use `'max'` or `'min'`). The `connect_pooling()` connector creates the "fake" connection pattern (as no weights are involved).
#
# Let's apply now a 3x3 box filter on each channel of the pooled population:
# Smoothing population
smoothed = Population(geometry=(48, 64, 3), neuron = LinearNeuron)
# Box filter projection
box_filter = np.ones((3, 3, 1))/9.
smooth_proj = Convolution(pre=pooled, post=smoothed, target='exc')
smooth_proj.connect_filter(weights=box_filter)
# To perform a convolution operation on the population (or more precisely a cross-correlation), we call the `connect_filter()` connector method of the `Convolution` projection. It requires defining a kernel (`weights`) that will be convolved over the input population. Here we use a simple box filter, but any filter can be used.
#
# As the `pooled` population has three dimensions and we want to smooth the activities per color channel, we need to define a (3, 3, 1) kernel. If we wanted to smooth also over the color channels, we could have used a (3, 3) filter: the resulting population would have the shape (48, 64).
#
# We now apply a bank of three filters, each selective to a particular color (red/green/blue). These filters do not have a spatial extent (1x1 convolution), but sum over the third dimension (the color channels):
# Convolution population
filtered = Population(geometry=(48, 64, 3), neuron = LinearNeuron)
# Red/Green/Blue filter bank
filter_bank = np.array([
[[ [2.0, -1.0, -1.0] ]] , # Red filter
[[ [-1.0, 2.0, -1.0] ]] , # Blue filter
[[ [-1.0, -1.0, 2.0] ]] # Green filter
])
filter_proj = Convolution(pre=smoothed, post=filtered, target='exc')
filter_proj.connect_filters(weights=filter_bank)
# Each of the three filters has the shape (1, 1, 3). The result of each convolution would then be (48, 64), but as there are three filters, the output population is (48, 64, 3). The last dimension does not correspond to the number of color channels, but to the number of filters in the bank: if you add a filter, the population will have to be (48, 64, 4).
#
# Banks of filters require to use `connect_filters()` instead of `connect_filter()`.
compile()
# After compilation, we can load an image into the input population:
image.set_image('test.jpg')
# To see the result, we need to simulate for four time steps (4 milliseconds, as `dt=1.0`).
#
# 1. Step 1: The `image` population loads the image.
# 2. Step 2: The `pooled` population subsamples the image.
# 3. Step 3: The `smoothed` population filters the pooled image.
# 4. Step 4: The bank of filters are applied by `filtered`.
simulate(4.0)
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(20.0, 20.0))
plt.subplot(532)
plt.imshow(image.r)
plt.title('Original')
plt.subplot(534)
plt.imshow(image.r[:,:,0], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('image R')
plt.subplot(535)
plt.imshow(image.r[:,:,1], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('image G')
plt.subplot(536)
plt.imshow(image.r[:,:,2], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('image B')
plt.subplot(537)
plt.imshow(pooled.r[:,:,0], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('pooled R')
plt.subplot(538)
plt.imshow(pooled.r[:,:,1], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('pooled G')
plt.subplot(539)
plt.imshow(pooled.r[:,:,2], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('pooled B')
plt.subplot(5, 3, 10)
plt.imshow(smoothed.r[:,:,0], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('smoothed R')
plt.subplot(5, 3, 11)
plt.imshow(smoothed.r[:,:,1], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('smoothed G')
plt.subplot(5, 3, 12)
plt.imshow(smoothed.r[:,:,2], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('smoothed B')
plt.subplot(5, 3, 13)
plt.imshow(filtered.r[:,:,0], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('filtered R')
plt.subplot(5, 3, 14)
plt.imshow(filtered.r[:,:,1], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('filtered G')
plt.subplot(5, 3, 15)
plt.imshow(filtered.r[:,:,2], cmap='gray', interpolation='nearest', vmin= 0.0, vmax=1.0)
plt.title('filtered B')
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/image/Image.py | Image.py |
# Webcam
The script `examples/image/Webcam.py` applies a red filter on the input from the webcam, and isolates one mode using a dynamical neural field.
Most of the concepts are similar to the Image Processing example. The `VideoPopulation` object also requires the Python bindings to OpenCV.
```
from ANNarchy import *
from ANNarchy.extensions.image import *
from ANNarchy.extensions.convolution import Convolution, Pooling
clear()
# Definition of the neurons
LinearNeuron = Neuron(equations="r=sum(exc): min=0.0")
DNF = Neuron(parameters="tau=10.0", equations="tau*dr/dt + r = sum(exc) + sum(inh): min=0.0, max=1.0")
# Population getting the video stream
width = 640
height = 480
video = VideoPopulation(geometry=(height, width, 3))
# Subsampling population
pooled = Population(geometry=(48, 64, 3), neuron = LinearNeuron)
# Mean-pooling projection
pool_proj = Pooling(pre=video, post=pooled, target='exc', operation='mean')
pool_proj.connect_pooling()
# Define a red filter with no spatial extent
red_filter = [[ [2.0, -1.0, -1.0] ]]
# Create a population of DNF neurons downscaling the image with a factor 10
dnf = Population(geometry=(48, 64), neuron = DNF)
# Create the convolution using the red filter
ff = Convolution(pre=pooled, post=dnf, target='exc')
ff.connect_filter(weights=red_filter)
# Create difference of Gaussians lateral connections for denoising/competition
lat = Projection(pre=dnf, post=dnf, target='inh')
lat.connect_dog(amp_pos=0.2, sigma_pos=0.1, amp_neg=0.1, sigma_neg=0.7)
```
The `VideoPopulation` acquires images from the webcam: here the webcam should be able to deliver 640x480 colored images.
The corresponding population is then subsampled by a factor of 10, and a red filter is applied to it. This feeds a DNF (see the "Neural Field" example) which selects the region with the highest density.
```
compile()
```
We can now start the camera 0 (`/dev/video0`, adapt it to your machine):
```
video.start_camera(0)
```
A simple GUI based on PyQtGraph allows to display the input and output of the network:
```
try:
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
except:
print('PyQtGraph is not installed, can not visualize the network.')
exit(0)
# Wrapping class
class Viewer(object):
" Class to visualize the network activity using PyQtGraph."
def __init__(self, video, result):
self.video = video
self.result = result
app = pg.mkQApp()
self.win = pg.GraphicsWindow(title="Live webcam")
self.win.resize(640,480)
box = self.win.addViewBox(lockAspect=True)
box.invertY()
self.vis = pg.ImageItem()
box.addItem(self.vis)
box = self.win.addViewBox(lockAspect=True)
box.invertY()
self.res = pg.ImageItem()
box.addItem(self.res)
self.win.show()
self.lastUpdate = pg.ptime.time()
self.avgFps = 0.0
def update(self):
# Set the input
self.video.grab_image()
# Simulate for 10 ms with a new input
simulate(5.0)
# Refresh the GUI
self.vis.setImage(np.swapaxes(self.video.r,0,1))
self.res.setImage(np.swapaxes(self.result.r,0,1))
# Listen to mouse/keyboard events
QtGui.QApplication.processEvents()
# FPS
now = pg.ptime.time()
fps = 1.0 / (now - self.lastUpdate)
self.lastUpdate = now
self.avgFps = self.avgFps * 0.8 + fps * 0.2
# print(self.avgFps)
def run(self):
timer = QtCore.QTimer()
timer.timeout.connect(self.update)
timer.start(0)
QtGui.QApplication.instance().exec_()
timer.stop()
# Start the GUI
view = Viewer(video, dnf)
view.run()
video.release()
```
| ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/image/Webcam.ipynb | Webcam.ipynb |
try:
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
except:
print('PyQtGraph is not installed on your system, can not visualize the network.')
exit(0)
try:
import pyqtgraph.opengl as gl
except:
print('OpenGL is not installed on your system, can not visualize the network.')
exit(0)
import numpy as np
class GLViewer(object):
" Class to visualize the network activity using PyQtGraph and openGL."
def __init__(self, populations, func, update_rate):
# Parameters
self.populations = populations
self.func = func
self.update_rate = update_rate
# Window
self.win = gl.GLViewWidget()
self.win.show()
self.win.setCameraPosition(distance=40)
# Prepare the plots
self.plots = []
shift = 0
for pop in self.populations:
p = gl.GLSurfacePlotItem(
x = np.linspace(0, pop.geometry[0]-1, pop.geometry[0]),
y = np.linspace(0, pop.geometry[1]-1, pop.geometry[1]),
shader='heightColor',
computeNormals=False,
smooth=False
)
p.translate(shift, -10, -1)
self.win.addItem(p)
self.plots.append(p)
shift -= 25
def scale(self, data):
" Colors are shown in the range [-1, 1] per default."
return 1.8 * data -0.9
def update(self):
"Callback"
# Simulate for 200ms
self.func(self.update_rate)
# Refresh the GUI
for i in range(len(self.populations)):
self.plots[i].setData(z=self.scale(self.populations[i].r))
# Listen to mouse/keyboard events
QtGui.QApplication.processEvents()
def run(self):
"Infinite loop"
timer = QtCore.QTimer()
timer.timeout.connect(self.update)
timer.start(0)
QtGui.QApplication.instance().exec()
def loop_bubbles(populations, func, update_rate):
"Launches the GL GUI and rotates the bubble infinitely."
# Create the GUI using PyQtGraph
app = QtGui.QApplication([])
viewer = GLViewer(populations, func, update_rate)
# Start the simulation forever
viewer.run() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/neural_field/Viz.py | Viz.py |
from ANNarchy import *
setup(dt=0.1)
# Rate-coded input neuron
input_neuron = Neuron(
parameters = "baseline = 0.0",
equations = "r = baseline"
)
# Rate-coded output neuron
simple_neuron = Neuron(
equations = "r = sum(exc)"
)
# Rate-coded population for input
pop1 = Population(geometry=1, neuron=input_neuron)
# Poisson Population to encode
pop2 = PoissonPopulation(geometry=1000, target="exc")
proj = Projection(pop1, pop2, 'exc').connect_all_to_all(weights=1.)
# Rate-coded population to decode
pop3 = Population(geometry=1000, neuron =simple_neuron)
proj = DecodingProjection(pop2, pop3, 'exc', window=10.0)
def diagonal(pre, post, weights):
"Simple connector pattern to progressively connect each post-synaptic neuron to a growing number of pre-synaptic neurons"
lil = CSR()
for rk_post in range(post.size):
lil.add(rk_post, range((rk_post+1)), [weights], [0] )
return lil
proj.connect_with_func(method=diagonal, weights=1.)
compile()
# Monitors
m1 = Monitor(pop1, 'r')
m2 = Monitor(pop2, 'spike')
m3 = Monitor(pop3, 'r')
# Simulate
duration = 250.
# 0 Hz
pop1.baseline = 0.0
simulate(duration)
# 10 Hz
pop1.baseline = 10.0
simulate(duration)
# 50 Hz
pop1.baseline = 50.0
simulate(duration)
# 100 Hz
pop1.baseline = 100.0
simulate(duration)
# Get recordings
data1 = m1.get()
data2 = m2.get()
data3 = m3.get()
# Raster plot of the spiking population
t, n = m2.raster_plot(data2['spike'])
# Variance of the the decoded firing rate
data_10 = data3['r'][int(1.0*duration/dt()):int(2*duration/dt()), :]
data_50 = data3['r'][int(2.0*duration/dt()):int(3*duration/dt()), :]
data_100 = data3['r'][int(3.0*duration/dt()):int(4*duration/dt()), :]
var_10 = np.mean(np.abs((data_10 - 10.)/10.), axis=0)
var_50 = np.mean(np.abs((data_50 - 50.)/50.), axis=0)
var_100 = np.mean(np.abs((data_100 - 100.)/100.), axis=0)
### Plot the results
import matplotlib.pyplot as plt
plt.subplot(3,1,1)
plt.plot(t, n, '.', markersize=0.5)
plt.title('a) Raster plot')
plt.xlabel('Time (ms)')
plt.ylabel('# neurons')
plt.xlim((0, 4*duration))
plt.subplot(3,1,2)
plt.plot(np.arange(0, 4*duration, 0.1), data1['r'][:, 0], label='Original firing rate')
plt.plot(np.arange(0, 4*duration, 0.1), data3['r'][:, 999], label='Decoded firing rate')
plt.legend(frameon=False, loc=2)
plt.title('b) Decoded firing rate')
plt.xlabel('Time (ms)')
plt.ylabel('Activity (Hz)')
plt.subplot(3,1,3)
plt.plot(var_10, label='10 Hz')
plt.plot(var_50, label='50 Hz')
plt.plot(var_100, label='100 Hz')
plt.legend(frameon=False)
plt.title('c) Precision')
plt.xlabel('# neurons used for decoding')
plt.ylabel('Normalized error')
plt.ylim((0,1))
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/hybrid/Hybrid.py | Hybrid.py |
from ANNarchy import *
from ANNarchy.extensions.bold import *
import matplotlib.pyplot as plt
# Two populations of 100 Izhikevich neurons each
pop0 = Population(100, neuron=Izhikevich)
pop1 = Population(100, neuron=Izhikevich)
# Set noise to create some baseline activity
pop0.noise = 5.0; pop1.noise = 5.0
# Compute mean firing rate in Hz on 100ms window
pop0.compute_firing_rate(window=100.0)
pop1.compute_firing_rate(window=100.0)
# Create required monitors
mon_pop0 = Monitor(pop0, ["r"], start=False)
mon_pop1 = Monitor(pop1, ["r"], start=False)
m_bold = BoldMonitor(
populations = [pop0, pop1], # recorded populations
bold_model = balloon_RN(), # BOLD model to use (default is balloon_RN)
mapping = {'I_CBF': 'r'}, # from pop.r to I_CBF
normalize_input = 2000, # time window to compute the baseline activity
recorded_variables = ["I_CBF", "BOLD"] # variables to be recorded
)
# Compile and initialize the network
compile()
# Ramp up time
simulate(1000)
# Start recording
mon_pop0.start()
mon_pop1.start()
m_bold.start()
# Increase the noise for one of the two populations (half of the recorded neurons)
simulate(5000) # 5s with low noise
pop0.noise = 7.5
simulate(5000) # 5s with higher noise (one population)
pop0.noise = 5
simulate(10000) # 10s with low noise
# retrieve recordings
mean_fr1 = np.mean(mon_pop0.get("r"), axis=1)
mean_fr2 = np.mean(mon_pop1.get("r"), axis=1)
input_data = m_bold.get("I_CBF")
bold_data = m_bold.get("BOLD")
# An example evaluation, which consists of:
# A) the mean firing activity
# B) the recorded activity which serves as input to BOLD
# C) the resulting BOLD signal
plt.figure(figsize=(20,6))
grid = plt.GridSpec(1, 3, left=0.05, right=0.95)
# mean firing rate
ax1 = plt.subplot(grid[0, 0])
ax1.plot(mean_fr1, label="pop0")
ax1.plot(mean_fr2, label="pop1")
plt.legend()
ax1.set_ylabel("average mean firing rate [Hz]", fontweight="bold", fontsize=18)
# BOLD input signal
ax2 = plt.subplot(grid[0, 1])
ax2.plot(input_data)
ax2.set_ylabel("BOLD input I_CBF", fontweight="bold", fontsize=18)
# BOLD input signal
ax3 = plt.subplot(grid[0, 2])
ax3.plot(bold_data*100.0)
ax3.set_ylabel("BOLD [%]", fontweight="bold", fontsize=18)
# x-axis labels as seconds
for ax in [ax1, ax2, ax3]:
ax.set_xticks(np.arange(0,21,2)*1000)
ax.set_xticklabels(np.arange(0,21,2))
ax.set_xlabel("time [s]", fontweight="bold", fontsize=18)
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/bold_monitor/BOLD.py | BOLD.py |
from ANNarchy import *
from ANNarchy.extensions.bold import *
import matplotlib.pyplot as plt
# Two populations of 100 izhikevich neurons
pop0 = Population(100, neuron=Izhikevich)
pop1 = Population(100, neuron=Izhikevich)
# Set noise to create some baseline activity
pop0.noise = 5.0; pop1.noise = 5.0
# Compute mean firing rate in Hz on 100ms window
pop0.compute_firing_rate(window=100.0)
pop1.compute_firing_rate(window=100.0)
# Create required monitors
mon_pop0 = Monitor(pop0, ["r"], start=False)
mon_pop1 = Monitor(pop1, ["r"], start=False)
m_bold = BoldMonitor(
populations = [pop0, pop1], # recorded populations
bold_model = balloon_two_inputs(), # BOLD model to use
# mean firing rate as source variable coupled to the input variable I_CBF
# membrane potential as source variable coupled to the input variable I_CMRO2
mapping={'I_CBF': 'r','I_CMRO2': 'v'},
normalize_input=2000, # time window to compute the baseline
recorded_variables=["I_CBF", "I_CMRO2", "BOLD"]
)
# Compile and initialize the network
compile()
# Ramp up time
simulate(1000)
# Start recording
mon_pop0.start()
mon_pop1.start()
m_bold.start()
# Increase the noise for one of the two populations (half of the recorded neurons)
simulate(5000) # 5s with low noise
pop0.noise = 7.5
simulate(5000) # 5s with higher noise (one population)
pop0.noise = 5
simulate(10000) # 10s with low noise
# retrieve the recordings
mean_fr1 = np.mean(mon_pop0.get("r"), axis=1)
mean_fr2 = np.mean(mon_pop1.get("r"), axis=1)
If_data = m_bold.get("I_CBF")
Ir_data = m_bold.get("I_CMRO2")
bold_data = m_bold.get("BOLD")
# An example evaluation, which consists of:
# A) the mean firing activity
# B) the recorded activity which serves as input to BOLD
# C) the resulting BOLD signal
plt.figure(figsize=(20,6))
grid = plt.GridSpec(1, 3, left=0.05, right=0.95)
# mean firing rate
ax1 = plt.subplot(grid[0, 0])
ax1.plot(mean_fr1, label="pop0")
ax1.plot(mean_fr2, label="pop1")
plt.legend()
ax1.set_ylabel("average mean firing rate [Hz]", fontweight="bold", fontsize=18)
# BOLD input signal
ax2 = plt.subplot(grid[0, 1])
ax2.plot(If_data, label='I_CBF')
ax2.plot(Ir_data, label='I_CMRO2')
ax2.set_ylabel("BOLD input variables", fontweight="bold", fontsize=18)
ax2.legend()
# BOLD signal as percent
ax3 = plt.subplot(grid[0, 2])
ax3.plot(bold_data*100.0)
ax3.set_ylabel("BOLD [%]", fontweight="bold", fontsize=18)
# x-axis labels as seconds
for ax in [ax1, ax2, ax3]:
ax.set_xticks(np.arange(0,21,2)*1000)
ax.set_xticklabels(np.arange(0,21,2))
ax.set_xlabel("time [s]", fontweight="bold", fontsize=18)
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/bold_monitor/BOLD_two_inputs.py | BOLD_two_inputs.py |
from ANNarchy import *
duration = 1000.0
setup(dt=0.1)
# ###########################################
# Define the neurons
# ###########################################
LIF = Neuron(
parameters = """
tau_m = 20.0 : population
tau_e = 5.0 : population
tau_i = 10.0 : population
E_rest = -49.0 : population
E_thresh = -50.0 : population
E_reset = -60.0 : population
""",
equations = """
tau_m * dv/dt = E_rest -v + g_exc - g_inh
tau_e * dg_exc/dt = -g_exc
tau_i * dg_inh/dt = -g_inh
""",
spike = "v > E_thresh",
reset = "v = E_reset"
)
# ###########################################
# Define the synapse
# ###########################################
STP = Synapse(
parameters = """
tau_rec = 200.0 : projection
tau_facil = 20.0 : projection
U = 0.2 : projection
""",
equations = """
dx/dt = (1 - x)/tau_rec : init = 1.0, event-driven
du/dt = (U - u)/tau_facil : init = 0.2, event-driven
""",
pre_spike="""
g_target += w * u * x
x *= (1 - u)
u += U * (1 - u)
"""
)
# ###########################################
# Create the populations
# ###########################################
P = Population(geometry=4000, neuron=LIF)
P.v = Uniform(-60.0, -50.0)
Pe = P[:3200]
Pi = P[3200:]
# ###########################################
# Create the projections
# ###########################################
con_e = Projection(pre=Pe, post=P, target='exc', synapse = STP).connect_fixed_probability(weights=1.62, probability=0.02)
con_i = Projection(pre=Pi, post=P, target='inh').connect_fixed_probability(weights=9.0, probability=0.02)
# ###########################################
# Compile the network
# ###########################################
compile()
# ###########################################
# Run without plasticity
# ###########################################
m = Monitor(P, 'spike')
simulate(duration, measure_time=True)
data = m.get()
# ###########################################
# Make plots
# ###########################################
t, n = m.raster_plot(data['spike'])
rates = m.population_rate(data['spike'], 5.0)
print('Total number of spikes: ' + str(len(t)))
import matplotlib.pyplot as plt
plt.subplot(211)
plt.plot(t, n, '.')
plt.xlabel('Time (ms)')
plt.ylabel('Neuron number')
plt.subplot(212)
plt.plot(np.arange(rates.size)*dt(), rates)
plt.show() | ANNarchy | /ANNarchy-4.7.2.6.tar.gz/ANNarchy-4.7.2.6/examples/pyNN/short_term_plasticity2.py | short_term_plasticity2.py |
from __future__ import print_function
import os
__author__ = 'naitiz'
class Fore:
BLACK = 30
RED = 31
GREEN = 32
YELLOW = 33
BLUE = 34
MAGENTA = 35
CYAN = 36
WHITE = 37
RESET = 39
# These are fairly well supported, but not part of the standard.
LIGHTBLACK_EX = 90
LIGHTRED_EX = 91
LIGHTGREEN_EX = 92
LIGHTYELLOW_EX = 93
LIGHTBLUE_EX = 94
LIGHTMAGENTA_EX = 95
LIGHTCYAN_EX = 96
LIGHTWHITE_EX = 97
class Back:
BLACK = 40
RED = 41
GREEN = 42
YELLOW = 43
BLUE = 44
MAGENTA = 45
CYAN = 46
WHITE = 47
RESET = 49
# These are fairly well supported, but not part of the standard.
LIGHTBLACK_EX = 100
LIGHTRED_EX = 101
LIGHTGREEN_EX = 102
LIGHTYELLOW_EX = 103
LIGHTBLUE_EX = 104
LIGHTMAGENTA_EX = 105
LIGHTCYAN_EX = 106
LIGHTWHITE_EX = 107
class Style:
BOLD = 1
FAINT = 2
ITALIC = 3
UNDERLINE = 4
BLINK = 5
REVERSE = 7
CONCEALED = 8
CROSSED = 9
NORMAL = 22
RESET = '\033[0m'
def colored(text, fg=None, bg=None, st=None):
"""Colorize text.
See https://en.wikipedia.org/wiki/ANSI_escape_code
    Available text foreground colours (attributes of the Fore class above):
        black, red, green, yellow, blue, magenta, cyan, white.
    Available text background colours (attributes of the Back class above):
        black, red, green, yellow, blue, magenta, cyan, white.
    Available styles (attributes of the Style class above):
        bold, faint, italic, underline, blink, reverse, concealed, crossed.
Terminal properties:
Terminal bold dark underline blink reverse concealed
xterm yes no yes bold yes yes
linux yes yes bold yes yes no
rxvt yes no yes bold/black yes no
dtterm yes yes yes reverse yes yes
teraterm reverse no yes rev/red yes no
aixterm normal no yes no yes yes
PuTTY color no yes no yes no
Windows no no no no yes no
Cygwin SSH yes no color color color yes
Mac Terminal yes no yes yes yes yes
    Example:
        colored('Hello, World!', Fore.RED, Back.BLACK, [Style.BOLD, Style.BLINK])
        colored('Hello, World!', Fore.GREEN)
"""
if os.getenv('ANSI_COLORS_DISABLED') is None:
fmt_str = '\033[%dm%s'
if fg is not None:
text = fmt_str % (fg, text)
if bg is not None:
text = fmt_str % (bg, text)
if st is not None:
from collections import Iterable
if isinstance(st, Iterable):
for s in st:
text = fmt_str % (s, text)
else:
text = fmt_str % (st, text)
        text += Style.RESET
return text
def cprint(text, fg=None, bg=None, st=None, **kwargs):
"""Print colorize text.
It accepts arguments of print function.
"""
print((colored(text, fg, bg, st)), **kwargs)
pass
def print_format_table():
"""
prints table of formatted text format options
"""
for style in dir(Style):
if not str(style).startswith('_'):
for fg in dir(Fore):
if not str(fg).startswith('_'):
s1 = ''
for bg in dir(Back):
if not str(bg).startswith('_'):
format = ';'.join(
[str(getattr(Style, style)), str(getattr(Fore, fg)), str(getattr(Back, bg))])
s1 += '\033[%sm %s \033[0m' % (format, format)
print(s1)
print('\n')
if __name__ == '__main__':
print_format_table()
cprint("fuck egg", Fore.GREEN, Back.BLUE, Style.UNDERLINE) | ANSI-Color | /ANSI-Color-1.0.2.tar.gz/ANSI-Color-1.0.2/ansicolor/termcolor.py | termcolor.py |
ANSI Colors
===========
A Python script and module to simply use ANSI Colors in a terminal.

----
### Author:
Lilian Besson.
### Language:
Python v2.7+ (but *not* v3).
A Py3k-compatible version is in progress!
This project is now hosted on [Pypi module repository](<https://pypi.python.org/pypi/ANSIColors-balises> "Pypi !").
Documentation
-------------
The complete documentation of the module is available, see [here on pythonhosted.org](<http://pythonhosted.org/ANSIColors-balises/> "on-line").
**All the details (installation, options, etc) are in the doc**.
Anyway, here is some basic information.
----
Installation
============
The project is just the main script **ANSIColors.py**.
How to install it
-----------------
Download or copy it from this *git* repository, then launch ``python setup.py install``.
More details can be found in the **INSTALL** file.
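Quick usage
-----------
Once installed, a minimal usage sketch looks like this (the colour balises used below are among those defined by the module):

```python
from ANSIColors import printc, sprint, writec

# Print a string, with the colour balises interpreted as ANSI escape codes:
printc("<green>Everything is fine<reset>, except <red><u>this part<U><reset>.")

# Or get the interpreted string back and use it yourself:
message = sprint("<blue>Some blue text<reset>")

# writec() writes without a trailing newline (and can also write to a file):
writec("<el><yellow>Updating...<reset>")
```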
Dependencies
------------
The project is *entirely written in Python*, version *2.7.3*.
For more details about the **Python** language, see [the official site](<http://www.python.org> "Python power !").
Python 2.7.1 or higher is **required**.
Platform(s)
-----------
The project has been *developed* on *GNU/Linux* (Ubuntu 11.10).
#### Warning (Windows)
It has also been quickly tested on *Windows 7* **with the Cygwin environment** and Python 2.7.
#### Warning (Mac OS X)
It should also work on *Mac OS X*, but it **has not been tested** there.
Any suggestions or feedback are welcome!
About the project
=================
This project was part of my work for the MPRI 1-21 **network programming course**.
The MPRI is the **Parisian Master for Research in Computer Science** (*Master Parisien de Recherche en Informatique* in French).
About the doc
=============
The documentation is produced mainly with **Sphinx**, Python's documentation generator.
I wrote a few scripts to help *Sphinx* to do what I wanted, and to generate a *PyDoc* version of the doc too.
Those scripts now constitute an independent and powerful project of their own.
It is hosted [here on my Google Site](https://sites.google.com/site/naereencorp/liste-des-projets/makepydoc "check this out too ;) !")
Contact me
----------
Feel free to contact me, either with a bitbucket message (my profile is [lbesson](https://bitbucket.org/lbesson/ "here")), or via an email at **lilian DOT besson AT ens-cachan DOT fr**.
License
-------
This project is released under the **GPLv3 license**, for more details, take a look at the LICENSE file in the source.
*Basically, it allows you to use all or part of the project for your own purposes.*
#########################
##### Program part ######
#########################
"""\n\
List of all colours:
==================
black, red, green, yellow, blue, magenta, cyan, white:
Bold colours.
Bblack, Bred, Bgreen, Byellow, Bblue, Bmagenta, Bcyan, Bwhite:
Normal colours (no bold).
Black, Red, Green, Yellow, Blue, Magenta, Cyan, White:
Background colours.
blink, Blink:
Blink special caracters (Blink is faster than blink).
.. warning::
Those are **not supported by all terminal emulator**.
For example, gnome-terminal and terminator **doesn't** support it,
but mintty.exe (Cygwin Windows terminal) support it.
reset, nocolors:
Special caracters to reinitialized ANSI codes buffer, or to do nothing.
default, Default:
default foreground colour, default background colour.
italic, Italic :
italic on, off. **Not always supported**.
b, B :
bold on, off,
u, U :
underline on, off,
neg, Neg :
reverse video on, off. **Not always supported**.
clear:
try to clear the screen. **Not always supported**.
el:
try to erase the current line. **Not always supported**.
    Useful to use with ``sys.stdout.write``
    to make the currently printed line change in place!
bell:
try to make an alarm sound. Also used to end the *xtitle* sequence.
warning, question, WARNING, INFO, ERROR:
aliases for classic markup (/!\\, /?\\, 'WARNING', 'INFO' and 'ERROR').
"""
__author__='Lilian BESSON (mailto:lilian.besson@normale.fr)'
__version__='1.9.9.public'
__date__='mar. 19/03/2013 at 12h:25m:49s'
#1###############
# Usual Modules #
import os, sys, subprocess
################################################################################
# TODO: arrange this.
# TODO: make them hidden from the interface of the script
# idea: remove from __all__.
########################################
#### Default values for new parsers ####
def _default_epilog(version, date, author):
""" This return the default epilog used to new parsers,
which contains a copyright paragraph, determined by the three arguments version, date, author.
"""
return """\n\
<yellow>Copyrights:
===========<reset>
Version %s, (c) 2012-2013 (last modif: %s). Written in Python 2.7.3 (<u>http://www.python.org<U>).
The parser of command line arguments is generated with the argparse module.
By %s,
ENS de Cachan (M1 Mathematics & M1 Computer Science MPRI).
For Naereen Corp.,
<u>mailto:naereen-corporation@laposte.net<U>.
<u>https://sites.google.com/site/naereencorp<U>.""" % (version, date, author)
#: The default description, used when generate a parser by _parser_default function !
_default_description = "WARNING: No description had been given to _parser_default..."
def _add_default_options(parser, version=__version__, date=__date__, author=__author__):
""" _parser_default(parser, version, date, author) -> argparse.ArgumentParser instance.
Return the parser *parser*, modified by adding default options for the project,
which put the options : --version, --verbose, --noANSI and --noUTF
and others basic options."""
parser.add_argument('--version', action='version', version='%(prog)s '+version)
#################################################
#: Let those two lines, just to remember that others stuffs.
parser.add_argument('--noANSI', help="If present, ANSI escape code from ANSIColors are *disable*.", action='store_true', default=False)
parser.add_argument('--ANSI', help="If present, ANSI escape code from ANSIColors are *forced* to be printed (even if the output is detected to be a pipe).", action='store_true', default=False)
return parser
# To make a default parser.
def _parser_default(description=_default_description, \
epilog="WARNING: No extra epilog had been given to _parser_default...", \
version=__version__, date=__date__, author=__author__, \
preprocessor = str):
""" _parser_default(parser, version, date, author) -> argparse.ArgumentParser instance.
Make a new *parser*, initialized by adding default options for the project (with _add_default_options)
The default description is *_default_description*,
The epilog will *epilog*, then _default_epilog(version, date, author).
preprocessor can be ANSIColors.sprint or __builtin__.str (default value)
(*i.e.* a string -> string function),
and it will be used as a **preprocessor** for *description* and *epilog* value.
Example:
>>> parser = _parser_default(description='<DELETE>A description.',\
epilog='The description will no begin by the balise DELETE, thanks to sprint preprocessing.',\
preprocessor=lambda s: s.replace('<DELETE>', ''))
"""
# Passing RawDescriptionHelpFormatter as formatter_class= indicates that description and epilog are already correctly formatted and should not be line-wrapped:
# RawTextHelpFormatter maintains whitespace for all sorts of help text, including argument descriptions.
# The other formatter class available, ArgumentDefaultsHelpFormatter, will add information about the default value of each of the arguments:
try:
import argparse
parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,\
description=preprocessor(description), prefix_chars='-+',\
epilog=preprocessor(epilog + _default_epilog(version, date, author)))
# change the function *_add_default_options*, not this one.
parser = _add_default_options(parser, version, date, author)
return parser
except ImportError:
sys.stderr.write("""ERROR : when I tried to import the 'argparse' module.
The first possible reason is that you are using a version of Python too old (< 2.7).
The other possible reason is another implementation of Python than the usual CPython:
* Jython,
* IronPython,
* PyPy,
for instance, are not supported.
""")
sys.stderr.flush()
sys.exit(1)
################################################################################
ANSISupported = True
try:
    #: If false, the module does almost NOTHING.
ANSISupported='TERM' in os.environ and os.environ['TERM'] != 'unknown'
if ('--noANSI' in sys.argv) or (not sys.stdout.isatty()): ANSISupported = False
if '--ANSI' in sys.argv: ANSISupported = True
except Exception as e:
print "I failed badly when trying to detect if ANSIColors are supported, reason = %s" % e
ANSISupported = False
# colours bold
black="\033[01;30m" #: Black and bold.
red="\033[01;31m" #: Red and bold.
green="\033[01;32m" #: Green and bold.
yellow="\033[01;33m" #: Yellow and bold.
blue="\033[01;34m" #: Blue and bold.
magenta="\033[01;35m" #: Magenta and bold.
cyan="\033[01;36m" #: Cyan and bold.
white="\033[01;37m" #: White and bold.
# colours not bold
Bblack="\033[02;30m" #: Black and not bold.
Bred="\033[02;31m" #: Red and not bold.
Bgreen="\033[02;32m" #: Green and not bold.
Byellow="\033[02;33m" #: Yellow and not bold.
Bblue="\033[02;34m" #: Blue and not bold.
Bmagenta="\033[02;35m" #: Magenta and not bold.
Bcyan="\033[02;36m" #: Cyan and not bold.
Bwhite="\033[02;37m" #: White and not bold.
# Background colours : not very usefull
Black="\033[40m" #: Black background
Red="\033[41m" #: Red background
Green="\033[42m" #: Green background
Yellow="\033[43m" #: Yellow background
Blue="\033[44m" #: Blue background
Magenta="\033[45m" #: Magenta background
Cyan="\033[46m" #: Cyan background
White="\033[47m" #: White background
# Others : blink and Blink are NOT SUPPORTED BY ALL TERMINAL
blink="\033[05m" #: Make the text blink. NOT SUPPORTED BY ALL TERMINAL. On Windows (with mintty) it's ok. On Linux (with ttys, gnome-terminal or pyterminal, it's not).
Blink="\033[06m" #: Make the text not blink (*i.e.* stop blinking).
# nocolors, then default, then Default
nocolors="\033[0m"
default="\033[39m" #: default foreground
Default="\033[49m" #: default background
italic="\033[3m" #: italic
Italic="\033[23m" #: no italic
b="\033[1m" #: bold
B="\033[2m" #: no bold
u="\033[4m" #: underline
U="\033[24m" #: no underline
neg="\033[7m" #: negative
Neg="\033[27m" #: no negative
# New ones
clear="\033[2J" #: Clear the screen.
el="\r\033[K" #: Clear the current line.
reset="\033[0;39;49m" #: Reset the current foreground and background values to default, and disable all effects.
bell="\007" #: BEL is the bell character (\007). It *might* be interpreted and a sonor signal might be heard (but not with every terminals).
title="\033]0;" #: Use it like : writec("<title>.: My title :.<bell>"), **and only** with ending the sequence with <bell>.
# Not specially balises, but aliases.
warning = "%s%s/!\\%s%s" % (red, u, U, default) #: A well colored Warning symbol (/!\\)
question = "%s%s/?\\%s%s" % (yellow, u, U, default) #: A well colored question symbol (/?\\)
ERROR = "%s%sERROR%s" % (reset, red, default) #: A well colored ERROR word.
WARNING = "%s%sWARNING%s" % (reset, yellow, default) #: A well colored WARNING word.
INFO = "%s%sINFO%s" % (reset, blue, default) #: A well colored INFO word.
#############################################################
#: List of all authorized colours.
colorList=['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white', 'Bblack', 'Bred', 'Bgreen', 'Byellow', 'Bblue', 'Bmagenta', 'Bcyan', 'Bwhite', 'Black', 'Red', 'Green', 'Yellow', 'Blue', 'Magenta', 'Cyan', 'White', 'Blink', 'blink', 'nocolors', 'default', 'Default', 'italic', 'Italic', 'b', 'B', 'u', 'U', 'neg', 'Neg', 'clear', 'el', 'reset', 'bell', 'title', 'warning', 'question', 'ERROR', 'WARNING', 'INFO']
#: List of all simple colours
simpleColorList=['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white']
# backup all colours
for n in colorList:
exec('_%s=%s' % (n, n))
# Turn off colour balises interpretation if they are not supported
if not(ANSISupported):
for n in colorList:
exec('%s=\"\"' % n)
def tocolor(string):
"""tocolor(string) -> string
    Convert a string to a colour.
    [string] **has** to be in [colorList] to be recognized (and interpreted).
    If [string] is not one of the colour names, the default return value is "", the empty string."""
if string in colorList:
res=""
exec('res=%s' % string)
return res
else: return ""
def sprint(chainWithBalises, left='<', right='>', verbose=False):
""" sprint(chainWithBalises, left='<', right='>', verbose=False) -> string
    Parse a string containing colour balises, where each balise is one of the previously defined colour names,
    and return it with the colour balises replaced by concrete ANSI colour codes.
    **Balises are delimited** by [left] and [right].
    By default, it's Pango style with '<' and '>', but you can change them.
    For example, an HTML style like left='<span colour=' and right='>' is also possible (although, without a closing '</span>', this is not a great example).

    .. warning::
        It is more prudent to put nothing other than ANSI Colors (*i.e.* values in colorList) between '<' and '>' in [chainWithBalises].
        The behaviour of the function with unknown balises **is not perfect**.
        Moreover, it is a good idea not to use '<' or '>' for anything other than balises.
        I know, it's not perfect. But the syntax of colour balises is so simple and so beautiful with this limitation that you will surely forgive me, *won't you* ;) ?
Example: ::
>>> print sprint("<blue>this is blue.<white>And <this> is white.<red>Now this is red because I am <angry> !<green><white>")
this is blue.And <this> is white.Now this is red because I am <angry> !
*About:*
This function is used in all the following, so all other function can also used *left* and *right* arguments.
"""
ls = chainWithBalises.split(left)
if verbose: print "\tls", ls
lls = list()
for s2 in ls:
if verbose: print "\ts2", s2
inte=s2.split(right)
if verbose: print "\tinte", inte
if inte[0] in colorList: inte[0]=tocolor(inte[0])
else:
if len(inte)>1: inte[0]=left+inte[0]+right
if verbose: print "\tinte", inte
lls.append(inte)
if verbose: print "\t", lls
res=""
for ii in range(len(lls)):
for j in range(len(lls[ii])):
res+=lls[ii][j]
return res
def erase(chainWithBalises, left='<', right='>', verbose=False):
""" erase(chainWithBalises, left='<', right='>', verbose=False) -> string
Parse a string containing colour balises, when colour is one of the previous define name,
and then return it, with colour balises **erased**.
Example:
        This example looks exactly the same as the previous one in the documentation, but it is not (**again**: it is hard, painful and maybe impossible to render colour in Sphinx RST files, so there is **no colour in the output** of the examples... but be sure that the real output is coloured!).
>>> print erase("<blue>This is blue.<white>And <this> is white.<red>Now this is red because I am <angry> !<reset>")
This is blue.And <this> is white.Now this is red because I am <angry> !
"""
ls = chainWithBalises.split(left)
if verbose: print "\tls", ls
lls = list()
for s2 in ls:
if verbose: print "\ts2", s2
inte=s2.split(right)
if verbose: print "\tinte", inte
if inte[0] in colorList: inte[0]='' #: Here the 'erasure' is made.
else:
if len(inte)>1: inte[0]=left+inte[0]+right
if verbose: print "\tinte", inte
lls.append(inte)
if verbose: print "\t", lls
res=""
for ii in range(len(lls)):
for j in range(len(lls[ii])):
res+=lls[ii][j]
return res
def printc(chainWithBalises, left='<', right='>'):
""" printc(chainWithBalises, left='<', right='>') -> unit
A shortcut to print sprint(chainWithBalises) : analyse all balises, and print the result."""
print sprint(chainWithBalises, left=left, right=right)
def writec(chainWithBalises="", file=sys.stdout, left='<', right='>', flush=True):
""" writec(chainWithBalises="", file=sys.stdout, left='<', right='>', flush=True) -> unit
    Useful to print coloured text **to a file**, represented by the object *file*.
    Also useful to print coloured text without an ending '\\n' character.
Example:
        In this example, before the long computation begins, it prints 'Computing 2**(2**(2**4)).....',
        and when the computation is done, it erases the current line (with the <el> balise)
        and prints ' Done !' in green: ::
>>> writec("<red>Computing<reset> 2**(2**(2**4))....."); tmp=2**(2**(2**4)); writec("<el><green>Done !<reset>")
        This example shows how to use the ANSIColors module to put coloured data in a file.
        Be aware that this file now contains ANSI escape sequences.
        For example, *$ cat /tmp/colored-text.txt* will print the colours correctly, but editing the file will show the *raw values* of the escape codes (the dirty stuff you usually don't want to look at): ::
>>> my_file = open('/tmp/colored-text.txt', mode='w') # Open an adhoc file.
            >>> writec("<blue>this is blue.<white>And <this> is white.<red>Now this is red because I am <angry> !<green><white>", file=my_file)
Remark:
Can also be used to simply reinitialize the ANSI colors buffer, but the function *Reset* is here for this: ::
>>> writec("<reset>")
.. warning::
        The file *file* **will be flushed** by this function if *flush* is set to True (this is the default behaviour).
        If you prefer not to, use the flush=False option: ::

            >>> writec(chainWithBalises_1, file=my_file, flush=False)
            >>> # many things.
            >>> writec(chainWithBalises_n, file=my_file, flush=False)
            >>> my_file.flush()  # only flush *here*.
"""
file.write(sprint(chainWithBalises, left=left, right=right))
if flush: file.flush()
def clearScreen():
""" clearScreen() -> unit
Try to clear the screen using ANSI code [clear]."""
writec("<clear>")
def clearLine():
""" clearLine() -> unit
Try to clear the current line using ANSI code [el]."""
writec("<el>")
def Reset():
""" Reset() -> unit
Try to reset the current ANSI codes buffer, using [reset]."""
writec("<reset>")
####################################
# Other tools for the interface
def notify(msg="", obj=".: Notification sent by ANSIColors.notify :.", icon=None, verb=False):
""" notify(msg='', obj='.: Notification sent by ANSIColors.notify :.', icon=None, verb=False) -> bool
Notification using subprocess and notify-send.
Also print the informations directly to the screen (only if verb=True).
.. warning::
This doesn't use any *ANSI escape* codes, but the common *notify-send* **linux** program.
It shall fails (but not durty) on Windows or Mac OS X.
Return True iff the title have been correctly changed.
Fails simply if *notify-send* is not found.
"""
try:
if icon:
subprocess.Popen(['notify-send', obj, msg, "--icon=%s/%s" % (os.getcwd(), icon)])
if verb: print "/notify/ A notification have been sent, with obj=%s, msg=%s, and icon=%s." % (obj, msg, icon)
else:
subprocess.Popen(['notify-send', obj, msg])
if verb: print "/notify/ A notification have been sent, with obj=%s, and msg=%s." % (obj, msg)
return 0
except Exception as e:
if verb: print "/notify/ notify-send : not-found ! Returned exception is %s." % e
return -1
def xtitle(new_title="", verb=False):
""" xtitle(new_title="", verb=False) -> 0|1
**Modify the current terminal title**.
Returns 0 if one of the two solutions worked, 1 otherwise.
An experimental try is with **ANSI escape code**,
if the simple way by *invoking* the **xtitle** program doesn't work (or if it is not installed).
.. note::
The second solution used the two *ANSI* Balises <title> and <bell>.
So, you can also do it with : ::
>>> ANSIColors.writec("<title>.: This is the new title of the terminal :.<bell>")
But this function *xtitle* is better : it tries two ways, and returns a signal to inform about his success.
"""
try:
subprocess.Popen(['xtitle', new_title])
if verb: print "/xtitle/ The title of the current terminal has been set to '%s'." % new_title
return 0
except Exception as e:
if verb: print "/xtitle/ xtitle : not-found ! Returned exception is %s." % e
try:
writec("<title>%s<bell>" % new_title)
except Exception as e:
if verb: print "/xtitle/ With ANSI escape code <title> and <bell> : failed. ! Returned exception is %s." % e
return 2
return 0
########################
##### Script part ######
########################
# To generate ~/.color.sh with this script,
# use ./ANSIColors.py -g,
def _Generate_color_sh(file_name=None):
""" _Generate_color_sh(file_name=None) -> string | unit.
Used to print or generate (if file_name is present and is a valid URI address)
a profile of all the colours *here* defined.
Print all ANSI Colors as 'export name=value'.
    Useful to auto-generate a ~/.color.sh file to be used with Bash:
    use the command './ANSIColors.py --generate --file ~/.color.sh',
    and then you can simply colorize your Bash scripts with '. ~/.color.sh' to import all the colours.
The file is a list of 'export NAME="VALUE"', to be used with GNU Bash.
"""
from time import sleep
if file_name:
writec("<green> The file %s is creating.<reset> (c) Naereen CORP. 2013.\t" % file_name)
writec("<blue><u>Listing of all ANSI Colors...<reset>")
sleep(0.9)
writec("<el>...")
for s in colorList:
writec("<green><u>%s<reset>..." % s)
sleep(0.1)
writec("<el>...")
writec("<reset>Listing of all ANSI Colors...><red><u> DONE !<reset>...")
sleep(0.9)
writec("<el>")
if file_name:
mfile=open(file_name, 'w')
else:
mfile=sys.stdout
mfile.write("""#!/bin/sh
# From ANSIColors.py module, auto generated with -g option. (*i.e.* the command './ANSIColors.py --generate')
#About the convention for the names of the colours :
# * for the eight colours black, red, green, yellow, blue, magenta, cyan, white:
# * the name in minuscule is for colour **with bold** (example 'yellow'),
# * the name starting with 'B' is for colour **without bold** (example 'Byellow'),
# * the name starting with a capital letter is for the background colour (example 'Yellow').
# * for the special effects (blink, italic, bold, underline, negative), **not always supported** :
# * the name in minuscule is to **activate** the effect,
# * the name starting with a capital letter is to **deactivate** the effect.
# * for the other special effects (nocolors, default, Default, clear, el), the effect is **immediate** (and seems to be well supported).
#About:
#======
# Use this script with other GNU Bash scripts, simply by importing him with
# $ . ~/.color.sh
#Copyrights:
#===========
# (c) 01/2013
# By Lilian BESSON,
# ENS de Cachan (M1 Mathematics & M1 Computer Science MPRI)
# mailto:lbesson@ens-cachan.fr
#
# For Naereen Corp.
# mailto:naereen-corporation@laposte.net
# https://sites.google.com/site/naereencorp
#
#List of colors:
#===============
""")
res = ""
for s in colorList:
exec("res=('%%s' %% %s)" % s.replace('\x1b', '\\\\x1b'))
        #: Un-escaping special characters.
res=res.replace('\x1b', '\\033')
res=res.replace('\r', '\\r')
mfile.write("export %s=\"%s\"\n" % (s, (r"%s" % res)))
mfile.write("#DONE\n\n")
if file_name:
writec("<green> The file %s have been creating.<reset> (c) Naereen CORP. 2013.\n" % file_name)
sys.exit(0)
def _run_complete_tests(color_list_tested=colorList):
""" _run_complete_tests(color_list_tested=colorList) -> unit.
Launch a complete test of all ANSI Colors code in the list *color_list_tested*.
"""
printc("Launching full test for ANSI Colors.<default><Default><nocolors> now the text is printed with default value of the terminal...")
for s in color_list_tested:
printc("The colour '%s'\t is used to make the following effect : <%s>!! This is a sample text for '%s' !!<default><Default><nocolors>..." % (s, s, s))
###############
##### End #####
if __name__ == '__main__':
    #: This variable is the preprocessor, given to the description and epilog by _parser_default:
    #:  * erase: to print with no colours,
    #:  * sprint: to print with colours.
    preprocessor = sprint if ANSISupported else erase  #: preprocessor = __builtin__.str, if you want to *see* the balises.
#: Generate the parser, with another module.
parser = _parser_default(\
description='<green>ANSI Colors utility <red>module<reset> and <blue>script<reset>.',\
epilog="""\n\
<yellow>About:
======<reset>
This module is <blue>still in development<reset>.
Last version of this project can be found <green>on-line<reset> :
* here on <neg>BitBucket<Neg> : <u>https://bitbucket.org/lbesson/ansi-colors<U>,
* here on <neg>PyPi<Neg> : <u>https://pypi.python.org/pypi/ANSIColors-balises<U>,
* and his documentation can be found here on <neg>Python Hosted<Neg> : <u>http://pythonhosted.org/ANSIColors-balises/<U>.
The reference page for ANSI code is : <u>http://en.wikipedia.org/wiki/ANSI_escape_code<U>.""", \
version=__version__, date=__date__, author=__author__, \
preprocessor=preprocessor)
    #: So, here comes the interesting part.
group = parser.add_mutually_exclusive_group()
group.add_argument("-t","--test", help="Launch a complete test of all ANSI Colors code defined here.", action="store_true")
#: Description for the part with '--file' and '--generate' options.
group = parser.add_argument_group('Generation of a GNU Bash colour aliases file', preprocessor("""\
<b>About the <u>convention<U> for the names of the colours :<reset>
* for the eight colours black, red, green, yellow, blue, magenta, cyan, white:
* the name in minuscule is for colour **with bold** (example <yellow>'yellow'<reset>),
* the name starting with 'B' is for colour **without bold** (example <Byellow>'Byellow'<reset>),
* the name starting with a capital letter is for the background colour (example <Yellow>'Yellow'<reset>);
* for the special effects (blink, italic (i), bold (b), underline (u), negative), <u>**not always supported**<reset> :
* the name in minuscule is for <u>**activate**<reset> the effect (example 'u' to <u>underline<U>),
    * the name starting with a capital letter is to <u>**deactivate**<reset> the effect (example 'U' to stop underline);
* for the other special effects (nocolors, default, Default, clear, el), the effect is <u>**immediate**<reset> (and seems to be well supported).
Use this script with other GNU Bash scripts, simply by importing him with
<b><black> . ~/.color.sh<reset>"""))
group.add_argument("-g","--generate", help="Print all ANSI Colors as 'export name=value'.", action="store_true") #:, required=True)
group.add_argument("-f","--file", help="If present, and with --generate option, don't print the values, but export them in the file FILE.", default=None)
#: The parser is done,
#: Use it to extract the args from the command line.
args = parser.parse_args()
#: Use those args.
if args.generate:
if args.file:
_Generate_color_sh(args.file)
else:
_Generate_color_sh()
sys.exit(0)
if args.test:
_run_complete_tests()
sys.exit(0)
parser.print_help()
sys.exit(1)
##############################################################################
# remove the scripts values here
# FIXME: be sure we removed exactly the good ones
else:
del(_Generate_color_sh)
del(_run_complete_tests)
del(_parser_default)
del(_default_description)
del(_default_epilog)
del(_add_default_options) | ANSIColors-balises | /ANSIColors-balises-1.9.9.public.tar.gz/ANSIColors-balises-1.9.9.public/ANSIColors.py | ANSIColors.py |
```sh
# d8888 888b 888 .d8888b. 8888888
# d88888 8888b 888 d88P Y88b 888
# d88P888 88888b 888 Y88b. 888
# d88P 888 888Y88b 888 "Y888b. 888
# d88P 888 888 Y88b888 "Y88b. 888
# d88P 888 888 Y88888 "888 888
# d8888888888 888 Y8888 Y88b d88P 888
# d88P 888 888 Y888 "Y8888P" 8888888
#
#
#
# .d8888b. 888 888 888
# d88P Y88b 888 888 888
# 888 888 888 888 888
# 888 .d88b. 88888b. 888888 888d888 .d88b. 888 888 .d88b. 888d888
# 888 d88""88b 888 "88b 888 888P" d88""88b 888 888 d8P Y8b 888P"
# 888 888 888 888 888 888 888 888 888 888 888 888 88888888 888
# Y88b d88P Y88..88P 888 888 Y88b. 888 Y88..88P 888 888 Y8b. 888
# "Y8888P" "Y88P" 888 888 "Y888 888 "Y88P" 888 888 "Y8888 888
#
```
# ANSI Controller
> Basic Python script to control the cursor position in the terminal,
> colorize any text in the terminal, and add any style the terminal's graphic mode supports
<div style="
color: white;
background-color: #8B0000;
padding: 10px;
alignment: center;
text-align: center;
">
<b>Note</b><br>
This module only works if the terminal supports ANSI escape characters.
<hr>
If you face any issue, please report it.
</div>
<br>
# Note:
> #### The goal of this script is to make the terminal easier to control: colorize text by simply writing the name of a color or a text style, move the cursor with a function call, or delete text in the window. If you work in a command window all the time, try it and see how much easier it makes things.
[](https://pypi.org/project/ANSIController "PyPi") [](LICENSE "License")
[More Info About ANSI Escape Codes](https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797#escape)
## Tool Snap
_______________

## Features
------------------------------
`ANSI Controller` Features:
- Move the cursor right, left, up, or down, or to any position on the terminal screen
- Colorize any text you want in the terminal
- Change the style of printed text: bold, italic, etc.
- Run multiple progress bars at the same time
## Tech
------------------------------
`ANSI Controller` uses a number of open source projects to work properly:
- [keyboard] - pypi module `https://pypi.org/project/keyboard/`
- `ANSI Controller` itself is open source on GitHub.
- More info about ANSI escape codes: https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797#escape
## Installation
------------------------------
`ANSI Controller` requires [pip](https://pypi.org/) to install.
Install the dependencies and start the script.
### Windows
```bash
pip install --upgrade ANSIController
```
### Linux && Termux
```bash
pip3 install --upgrade ANSIController
```
# Table of colors & style
- #### If the terminal does not support colors, the escape codes are simply removed from the string
- ### Note: add custom values to the string to colorize the output
> - ### [ID Colors](https://robotmoon.com/256-colors/): `<{id}>` from 0 to 255 , `<255>` , `<15>`
> - ### [RGB Colors](https://www.rapidtables.com/web/color/RGB_Color.html): `(red_value,green_value,blue_value)` from 0 to 255 , `(213,201,250)`
> - ### [Hex RGB](https://www.rapidtables.com/web/color/RGB_Color.html): `#FFFFFF` using hex , `#4affa1`
- #### Background: `X,x`
- #### Reset: `Z,0,reset,Reset`
- #### Colors:
- [+] `black:` `b,black,30`
- [+] `red:` `r,red,31`
- [+] `green:` `g,green,32`
- [+] `yellow:` `y,yellow,33`
- [+] `blue:` `l,blue,34`
- [+] `magenta:` `m,magenta,35`
- [+] `cyan:` `c,cyan,36`
- [+] `white:` `w,white,37`
- [+] `default:` `d,default,39`
- [+] `bright black:` `bb,bblack,90`
- [+] `bright red:` `br,bred,91`
- [+] `bright green:` `bg,bgreen,92`
- [+] `bright yellow:` `by,byellow,93`
- [+] `bright blue:` `bl,bblue,94`
- [+] `bright magenta` `bm,bmagenta,95`
- [+] `bright cyan:` `bc,bcyan,96`
- [+] `bright white:` `bw,bwhite,97`
- #### Styles:
- [+] `bold:` `B,bold,1`
- [+] `dim:` `D,dim,2`
- [+] `italic:` `I,italic,3`
- [+] `underline:` `U,underline,4`
- [+] `blinking:` `L,blinking,5`
- [+] `reverse:` `R,reverse,7`
- [+] `hidden:` `H,hidden,8`
- [+] `strikethrough:` `S,strikethrough,9`
- #### ProgressBar: add `%{char}%`
- [+] `c:` `current progress value`
- [+] `m:` `max progress value`
- [+] `p:` `percent progress value`
- [+] `b:` `bar progress value`
- [+] `f:` `print full bar with all info`
- [+] `e:` `Elapsed Time`
- [+] `r:` `Remaining Time`
- [+] `s:` `Speed`
- [+] `your_custom_key:` `your_custom_value`
- #### More Control in ProgressBar:
- [+] `txt:` `key of string value inside`
- [+] `mx:` `max value default is 100`
- [+] `inc:` `increamnt value defualt is 1`
- [+] `bopen:` `bar open char default '|'`
- [+] `bfill:` `bar filled char default '█'`
- [+] `bafill:` `bar after filled char default ''`
- [+] `bempty:` `bar empty char default ' '`
- [+] `bclose:` `bar close char default ' '`
- [+] `custom:` `dict object with custom keys`
## Examples & Usage
____________
> Terminal Execute
### windows
```shell
python -m ANSIController
```
> OR
```shell
ansicontroller
```
### Linux && Termux
```shell
python3 -m ANSIController
```
> OR
```shell
ansicontroller
```
> Python Code
``` python
from ANSIController import Terminal # Import Needed Class
terminal_control = Terminal() # Create Object From Class Terminal
```
> To see all the tests
```python
# print all styles with test example
Terminal.print_styles()
# print all colors with test example for background too and codes
Terminal.print_colors()
# print all colors & styles with codes
Terminal.print_colors_styles()
# print ids colors background and normal from 0 to 255
Terminal.print_id_colors()
# will print all pervious in same time
Terminal.print_test()
# Try it
Terminal.game()
```
## Output:



### to move cursor
```python
# this will make the cursor move up 3 lines
terminal_control.move_to_up(steps=3)
# After moving up 3 lines, the cursor will start at column 0 of the line
terminal_control.move_to_up(steps=3,start_line=True)
# this will make the cursor move down 1 line
terminal_control.move_to_down(steps=1)
# some terminals refuse to move up; this function forces the terminal to move up 1 line
terminal_control.force_move_to_up()
# move cursor to the home position, i.e. row 0 and col 0
terminal_control.move_to_home_postion()
# move cursor to a custom position, row {num} col {num}, on the terminal screen
# here the cursor will move to row 14, column 20
terminal_control.move_to_line(row=14,col=20)
```
### to hide cursor
```python
# hide cursor , try it
terminal_control.hide_cursor()
# show cursor if hidden
terminal_control.show_cursor()
# show cursor if hidden, and hide it if shown
terminal_control.toggle_cursor()
```
### To save or restore the cursor position
```python
terminal_control.save_cursor_postion() # Save the current cursor position (row, column)
# Restore Last Saved Cursor postion
terminal_control.restor_cursor_postion() # the cursor will automatically move to the saved position
```
### Let's say I want to clear some text from the terminal (without `\r` tricks)
```python
terminal_control.clear_screen() # Clear the terminal screen, similar to the `cls` / `clear` commands
terminal_control.clear_after_cursor() # Clear all text after the cursor position on the screen
terminal_control.clear_before_cursor() # Clear all text before the cursor position on the screen
terminal_control.clear_line() # Clear the current line (the cursor's row), starting from the first column
terminal_control.clear_line_after_cursor() # Clear all text after the cursor position in the same line
terminal_control.clear_line_before_cursor() # Clear all text before the cursor position in the same line
```
### Let's say I want to colorize some text
`Using the concepts from the table of colors & styles above`
> Using colors only, no styles or background
> The colorize function takes: a text and a separator
>
> syntax:
>
> `sep some_style_codes_or_color_code sep`
>
> for example, if `sep` is `[]`,
>
> the syntax will be: `[some_style_codes_or_color_code]`
>
```python
sep = "[]"
colorize_texts_using_color_char = [
"[r]This is Red[0]",
"[g]This is Green[0]",
"[y]This is Yellow[0]",
"[b]This is Black[0]",
"[l]This is Blue[0]",
"[m]This is Megenta[0]",
"[c]This is Cyan[0]",
"[w]This is White[0]",
"[d]This is default[0]",
"[bb]This is Bright Black[0]",
"[br]This is Bright Red[0]",
]
for text in colorize_texts_using_color_char:
print(terminal_control.colorize(text,sep))
```
## Output:

> Now Using style only
>
```python
sep = "[]"
colorize_texts_using_style_char = [
"[B]This is Bold[0]",
"[D]This is Dim[0]",
"[I]This is Italic[0]",
"[U]This is Underline[0]",
"[L]This is Blinking[0]",
"[R]This is reverse[0]",
"[H]This is Hidden[0]",
"[S]This is Strikethrogh[0]",
]
for text in colorize_texts_using_style_char:
print(terminal_control.colorize(text,sep))
```
## Output:

> Now Using Colors & style
>
```python
sep = "[]"
colorize_texts_using_style_color_char = [
"[Br]This is Bold and Red[0]",
"[Dy]This is Dim and Yellow[0]",
"[Il]This is Italic and Blue[0]",
"[Ug]This is Underline and Green[0]",
"[Lb]This is Blinking and black[0]",
"[Rbr]This is reverse and Bright Red[0]",
"[Hby]This is Hidden and Bright Yellow[0]",
"[Sbc]This is Strikethrogh and Bright Cyan[0]",
]
for text in colorize_texts_using_style_color_char:
print(terminal_control.colorize(text,sep))
```
## Output:

> To add a background, just add `X` or `x` to the block: `[xrB]` means red background with bold style (see the short sketch after this note)
>
> Note: you can use `terminal_control.print_colorize` to print directly, without `print`
>
> `[0]`, `[z]`, `[Z]`, `[Reset]` reset the terminal to the default color & style
>
>
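A minimal sketch of the background syntax from the note above (the `[xrB]` block comes straight from it; the second line is an assumed combination of the same background marker with other codes from the table):

```python
sep = "[]"
# x (or X) adds a background: here red background + bold style
print(terminal_control.colorize("[xrB]Red background, bold text[0]", sep))
# assumed: background marker combined with green colour and underline style
print(terminal_control.colorize("[xgU]Green background, underlined[0]", sep))
```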
### - Using multiprogressbar
```python
# add_progress: takes a list of texts
# or a dict
# example with a list
terminal_control.add_progress([
"[rB]test1[0]",
"[w]test2[0]",
"[cI]test3[0]",
"[yD]test4[0]",
])
# example with dict
# takes a `progress_name` key to access the progress later
# `txt` key is the progress text
terminal_control.add_progress({
"progress1":{"txt": "[rB]test1[0]"},
"progress2":{"txt": "[w]test2[0]"},
"progress3":{"txt": "[cI]test3[0]"},
"progress4":{"txt": "[yD]test4[0]"},
})
# now if I want to add progress values and update them
# example with a list
# access by index
terminal_control.add_progress([
"[rB]test1: (%c%/%m%)[0]",
"[w]test2: %b% (%c%/%m%)[0]",
"[cI]test3: %b% %p% (%c%/%m%)[0]",
"[yD]test4: %f% [0]",
])
# example with a dict, for more control
terminal_control.add_progress({
"progress1":{
"txt": "[rB]test1: (%c%/%m%) - (%key1%,%key2%,%status_test%)[0]",
"mx":200,
"inc":5,
"custom":{
"key1":10,
"key2":"test",
"status_test":"Good"
}
},
# sometimes no need for `mx` or `inc`
"progress2":{
"txt": "[w]test2: %b% (%c%/%m%)- (%key1%,%key2%)[0]",
"custom":{
"key1":10,
"key2":"test",
"status_test":"Good"
}
},
    'progress_key1' | progress_key_integer: {
'txt':string...,
'mx':100,
'inc':1,
'custom':{},
'bopen':'|',
'bfill':'█',
'bafill':'',
'bempty':' '
'bclose':'|',
}
})
#----------------------------------------
# now to update progress bar values
# progress_key = the index, if a list was used
# progress_key = the name, if a dict was used
# the `all` argument means: apply the change to all texts
# the default of `all` is False
# to change max value of custom texts
terminal_control.set_progress_max_value(150,"progress_key")
terminal_control.set_progress_max_value(150,all=True)
# to change auto increment value of custom texts
terminal_control.set_progress_inc_value(5,"progress_key")
terminal_control.set_progress_inc_value(5,all=True)
# to change text value of custom texts
terminal_control.set_progress_text("[rD]This is Text[0]","progress_key")
terminal_control.set_progress_text("[rD]This is Text[0]",all=True)
# to change or add custom value of custom texts
terminal_control.set_custom_value("key1","value1","progress_key")
terminal_control.set_custom_value("key1","value1",all=True)
#----------------------------------------
# now to update progress value
# this function gives more control over the update
terminal_control.update(
value = 13, # if no progress value, leave it
progress_key="progress_key", # if no all, leave it
all=True or False,
custom_values={
"key1":"value1",
"key2":"value2"
}
)
# if you want to auto-update using the `inc` value, just call this
terminal_control.increase_progress("progress_key")
terminal_control.increase_progress(all=True)
#----------------------------------------
# now to print & check progress value
# to print all progress text with colorize mode
terminal_control.print_progress()
# to check whether a progress bar is finished or not
terminal_control.is_progress_finish("progress_key")
terminal_control.is_progress_finish(all=True)
```
## Output:

## Ref
________________
- https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797#escape
________________
## Tests
* ✅ `Windows 11 & 10 & 7`
  * works with no issues
* 👍 `Linux`
  * works, but issues may appear
  * still being worked on
* 🔧 `Termux`
  * some features need a rooted device
  * still being worked on
________________
### `If you face any issue, don't be shy: report it`
## License
**MIT License**
**Copyright (c) 2023 [JoOx01]**
`Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.`
| ANSIController | /ANSIController-1.1.1.tar.gz/ANSIController-1.1.1/README.md | README.md |
# ANSIEnum: *Python + ANSI = easy!*
Q) Want a simple way to put ANSI escape codes in your project?
A) Yes? So did I! Here is my solution...
## Description
Allows inline adding of [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) for color and cursor
manipulation.
## Features
- [SGR](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_(Select_Graphic_Rendition)_parameters) properties for
  setting text **FG**/**BG** color (incl **BR**ight) plus effects (these can also be added together):
- FG_*
- FG_BR_*
- BG_*
- BG_BR_*
  - UL_, ITAL_, STRIKE_, INVERT_ and BLINK_ control
- [CSI](https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_(Control_Sequence_Introducer)_sequences) properties for
  clearing screen:
- ERASE_DISP_*
- ERASE_LINE_*
- CSI functions for cursor control (complete list [below](#cursor-functions)):
- cursor_*
*Note:* Complete property (upper case) names are available from the enum with `dir(ANSI)`
## Installation
```shell
$ pip install ANSIEnum
```
## Usage
```python
from ansienum import ANSI
# Simple text color with f-strings:
print(f"{ANSI.FG_RED}Red text!{ANSI.RESET}")
# Codes of the same type can be added:
my_color = ANSI.FG_RED + ANSI.BG_BLUE
print(f"{my_color}Red on blue text!{ANSI.RESET}")
# Move cursor:
print(f"{ANSI.cursor_up(2)}Moved text")
```
### Cursor functions
```python
ANSI.cursor_at(n, m)
ANSI.cursor_up(n)
ANSI.cursor_down(n)
ANSI.cursor_right(n)
ANSI.cursor_left(n)
ANSI.cursor_next_line(n)
ANSI.cursor_prev_line(n)
ANSI.cursor_horiz_abs(n)
```
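
The erase properties combine with the cursor helpers in the same way. A small sketch (not from the original docs) using the properties listed above:

```python
from ansienum import ANSI

# Clear the whole display, jump to the top-left corner and print:
print(f"{ANSI.ERASE_DISP_ALL}{ANSI.cursor_at(1, 1)}Fresh screen!")

# Overwrite only the current line:
print(f"{ANSI.ERASE_LINE_ALL}{ANSI.cursor_horiz_abs(1)}Status: done", end="")
```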
## Contributing
If you are reading this and want to contribute, please feel free to create an issue. Note that it is a work in progress
and things probably will change, but I intend to keep backward compatibility where possible.
## License
This project is under an [MIT license](LICENSE)
#### (c) 2021 Paul Taylor @paulyt | ANSIEnum | /ANSIEnum-0.2.3.tar.gz/ANSIEnum-0.2.3/README.md | README.md |
from enum import Enum
from typing import Union
__all__ = ["ANSI"]
VERSION = "0.2.3"
class ANSIEnum(Enum):
def __init__(self, n: Union[int, list[int]], control_byte: str, abbr: str = None, description: str = None):
self._n = n
if type(self._n) is list:
for i, t in enumerate(self._n):
if type(t) is not int:
raise TypeError(
"%s sequence item %d: expected int instance, %s found" % (
n, i, type(t).__name__))
elif type(self._n) is not int:
raise TypeError("'%s': expected int instance, %s found" % (self._n, type(self._n).__name__))
self._control_byte = control_byte
self._abbr = None
if abbr is not None:
self._abbr = abbr
if control_byte == "m":
self._abbr = "SGR"
elif self._abbr is None:
self._abbr = "CSI"
self._description = description
@property
def _n_str(self):
if type(self._n) is list:
return ';'.join([str(n) for n in self._n])
else:
return str(self._n)
@property
def info(self):
info = "%s (%s): 'ESC[%s%s'" % (self.name, self._abbr, self._n_str, self._control_byte)
if self._description:
info += " | %s" % self._description
return info
def __add__(self, other):
if self.value[1] != "m" or other.value[1] != "m":
raise TypeError(
"unsupported operand code(s) for +: '%s' and '%s'" % (self.value[1], other.value[1]))
self_n, self_code = self.value
other_n, other_code = other.value
new_name = "%s+%s" % (self.name, other.name)
if type(self_n) is list:
new_n = self_n
if other_n not in self_n:
new_n.append(other_n)
else:
new_n = [self_n, other_n]
return getattr(ANSIEnum("ANSI", {new_name: (new_n, self._control_byte)}), new_name)
def __str__(self):
return "\x1b[%s%s" % (self._n_str, self._control_byte)
def __repr__(self):
return "%s%s" % (self._n_str, self._control_byte)
ANSI = ANSIEnum("ANSI", {
# CSI (Control Sequence Introducer) sequences
"CUR_UP_1": (1, "A", "CUU"),
"cursor_up": lambda n=1: getattr(
ANSIEnum("ANSI", {"CUR_UP_%d" % n: (n, "A", "CUU", "Move cursor up %d line(s)" % n)}),
"CUR_UP_%d" % n),
"CUR_DOWN_1": (1, "B", "CUD"),
"cursor_down": lambda n=1: getattr(
ANSIEnum("ANSI", {"CUR_DOWN_%d" % n: (n, "B", "CUD", "Move cursor down %d line(s)" % n)}),
"CUR_DOWN_%d" % n),
"CUR_FWD_1": (1, "C", "CUF"),
"cursor_right": lambda n=1: getattr(
ANSIEnum("ANSI", {"CUR_FWD_%d" % n: (n, "C", "CUF", "Move cursor right %d column(s)" % n)}),
"CUR_FWD_%d" % n),
"CUR_BACK_1": (1, "D", "CUB"),
"cursor_left": lambda n=1: getattr(
ANSIEnum("ANSI", {"CUR_BACK_%d" % n: (n, "D", "CUB", "Move cursor left %d column(s)" % n)}),
"CUR_BACK_%d" % n),
"CUR_NEXT_LINE": (1, "E", "CNL"),
"cursor_next_line": lambda n=1: getattr(
ANSIEnum("ANSI", {"CUR_NEXT_LINE_%d" % n: (n, "E", "CNL", "Move cursor to first column, %d line(s) down" % n)}),
"CUR_NEXT_LINE_%d" % n),
"CUR_PREV_LINE": (1, "F", "CPL"),
"cursor_prev_line": lambda n=1: getattr(
ANSIEnum("ANSI", {"CUR_PREV_%d" % n: (n, "F", "CPL", "Move cursor to first column, %d line(s) up" % n)}),
"CUR_PREV_%d" % n),
"CUR_HOR_ABS": (1, "G", "CHA", "Cursor Horizontal Absolute, column 1"),
"cursor_horiz_abs": lambda n=1: getattr(
ANSIEnum("ANSI", {"CUR_HOR_ABS_%d" % n: (n, "G", "CHA", "Move cursor to column %d" % n)}),
"CUR_HOR_ABS_%d" % n),
"cursor_at": lambda n=1, m=1: getattr(
ANSIEnum("ANSI", {"CUR_AT_%d_%d" % (n, m): ([n, m], "H", "CUP", "Move cursor to row %d, column %d" % (n, m))}),
"CUR_AT_%d_%d" % (n, m)),
"ERASE_DISP_RIGHT": (0, "J", "EID", "Erase in Display, Cursor to end of screen"),
"ERASE_DISP_LEFT": (1, "J", "EID", "Erase in Display, Start of screen to cursor"),
"ERASE_DISP_ALL": (2, "J", "EID", "Erase in Display, Entire screen (preserve scrollback)"),
"ERASE_DISP_ALLSB": (3, "J", "EID", "Erase in Display, Entire screen including scrollback"),
"ERASE_LINE_RIGHT": (0, "K", "EIL", "Erase in Line, Cursor up to end"),
"ERASE_LINE_LEFT": (1, "K", "EIL", "Erase in Line, Start up to cursor"),
"ERASE_LINE_ALL": (2, "K", "EIL", "Erase in Line, Entire line"),
# SGR (Select Graphic Rendition) parameters
"RESET": (0, "m"),
"ITAL_ON": (3, "m"),
"UL_ON": (4, "m"),
"BLINK_SLOW": (5, "m"),
"BLINK_FAST": (6, "m"),
"INVERT_ON": (7, "m"),
"STRIKE_ON": (9, "m"),
"ITAL_OFF": (23, "m"),
"UL_OFF": (24, "m"),
"BLINK_OFF": (25, "m"),
"INVERT_OFF": (27, "m"),
"STRIKE_OFF": (29, "m"),
"FG_BLACK": (30, "m"),
"FG_RED": (31, "m"),
"FG_GREEN": (32, "m"),
"FG_YELLOW": (33, "m"),
"FG_BLUE": (34, "m"),
"FG_MAGENTA": (35, "m"),
"FG_CYAN": (36, "m"),
"FG_WHITE": (37, "m"),
"FG_DEFAULT": (39, "m"),
"FG_GRAY": (90, "m"),
"FG_BR_RED": (91, "m"),
"FG_BR_GREEN": (92, "m"),
"FG_BR_YELLOW": (93, "m"),
"FG_BR_BLUE": (94, "m"),
"FG_BR_MAGENTA": (95, "m"),
"FG_BR_CYAN": (96, "m"),
"FG_BR_WHITE": (97, "m"),
"BG_BLACK": (40, "m"),
"BG_RED": (41, "m"),
"BG_GREEN": (42, "m"),
"BG_YELLOW": (43, "m"),
"BG_BLUE": (44, "m"),
"BG_MAGENTA": (45, "m"),
"BG_CYAN": (46, "m"),
"BG_WHITE": (47, "m"),
"BG_DEFAULT": (49, "m"),
"BG_GRAY": (100, "m"),
"BG_BR_RED": (101, "m"),
"BG_BR_GREEN": (102, "m"),
"BG_BR_YELLOW": (103, "m"),
"BG_BR_BLUE": (104, "m"),
"BG_BR_MAGENTA": (105, "m"),
"BG_BR_CYAN": (106, "m"),
"BG_BR_WHITE": (107, "m"),
}) | ANSIEnum | /ANSIEnum-0.2.3.tar.gz/ANSIEnum-0.2.3/src/ansienum/__init__.py | __init__.py |
# ANTConnect
Provides access to an ANT CDE
For more details, visit [docs.antcde.io](https://docs.antcde.io/)
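## Basic usage
A minimal sketch of how the client is used (the import path is an assumption; check [docs.antcde.io](https://docs.antcde.io/) for the exact module name):

```python
from antconnect import API  # assumed import path

api = API(host="https://api.antcde.io/", logging=True)

# login() returns True on success; if 2FA is enabled you will be prompted for a code
if api.login(
    client_id="your-client-id",
    client_secret="your-client-secret",
    username="you@example.com",
    password="your-password",
):
    print(api.getUserInfo())
```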
## Release notes
### Version 2021.05.1
- Updates Tasks API Calls and includes download files
### Version 2021.04
- Adds Tasks API calls to the SDK
### Version 2020.20.5
- Adds multiple options in column create and column update
### Version 2020.20.2
- Complements update of API on ANTCDE
### Version 2020.18.1
- Added more consistancy
### Version 0.1.3
- Bugfix
### Version 0.1.2
- Bugfix Column Create
### Version 0.1.0
- Initial version | ANTConnect | /ANTConnect-2023.9.3.tar.gz/ANTConnect-2023.9.3/README.md | README.md |
import chunk
from getpass import getuser
import time
import codecs
import json
import base64
# import sys
from datetime import datetime, timedelta
import requests
# from .logger import Logger
class API:
_host = ""
_access_token = ""
_api_version = "1.0"
_authenticated = False
_logger = None
def __init__(self, host: str = "https://api.antcde.io/", logging: bool = False):
self._host = host
self._logging = logging
self._remainingRequests = 10
def login(self, client_id: str, client_secret: str, username: str, password: str) -> bool:
""" Login into ANT"""
self._authenticated = False
self._client_id = client_id
self._client_secret = client_secret
response = self._make_request('oauth/token', 'POST', {
"grant_type": "password",
"username": username,
"password": password,
"client_id": client_id,
"client_secret": client_secret
})
if self._logging:
print('New login call at {}, returned with code {}'.format(datetime.now(), response.status_code))
if response.status_code != 200:
print("The response was: {}".format(response.reason))
return False
else:
parsed_response = response.json()
# print(parsed_response)
if 'access_token' not in parsed_response:
raise SystemError("Please check credentials")
now = datetime.now()
self._access_token = parsed_response['access_token']
self._refresh_token = parsed_response['refresh_token']
self._expires_at = now + timedelta(seconds=parsed_response['expires_in'])
self._authenticated = True
user = self.getUserInfo()
self._licenses = user['licenses']
if user['two_factor_enabled']:
two_fa_validated = False
while not two_fa_validated:
code = input("Provide your 2FA code: ")
two_fa_validated = self.twoFactor(code)
return True
def twoFactor(self, code: str):
body = {"code": str(code)}
response = self._make_api_request('2fa/verify', 'POST', body)
validated = False
try:
validated = response['status'] == 'success'
except ValueError:
print("Your code was invalid, try it again")
        except Exception:
print("Your code was invalid, try it again")
return validated
def getUserInfo(self):
return self._make_api_request('user', 'GET')
def _make_api_request(self, path: str, method: str,
parameters: dict = None, delete_data: dict = None) -> dict:
parameters = {} if parameters is None else parameters
if not self._authenticated:
raise SystemError("You are not authenticated, please use login first.")
if datetime.now() >= self._expires_at:
print('Unauthorised, we try to refresh token')
self.refresh_token()
if not self._authenticated:
return False
data = parameters if method in ['GET', 'DELETE'] else json.dumps(
parameters)
url = 'api/{}/{}'.format(self._api_version, path)
        # If the rate limit has been reached, wait until the window refreshes
if self._remainingRequests == 0:
remaining_seconds = (self._RateLimitRefreshAt - datetime.now()).total_seconds()
if remaining_seconds > 0:
if self._logging:
print('Sleeping {} seconds, API rate limit reached'.format(remaining_seconds))
time.sleep(remaining_seconds)
response = self._make_request(
url,
method,
data,
{
"Content-Type": "application/json",
"Accept": "application/json",
"Authorization": "Bearer {}".format(
self._access_token)
},
delete_data)
# Check if still authenticated
if response.status_code == 401:
self._authenticated = False
self._access_token = ""
self._refresh_token = ""
self._expires_at = ""
return False
if response.status_code == 500:
return False
if response.status_code == 400:
print("An error occured: {}".format(response.json()['message']))
return False
# set new time
        if 'x-ratelimit-remaining' not in response.headers:
            # Header missing; set a placeholder value so the rate-limit bookkeeping below can continue
            response.headers['x-ratelimit-remaining'] = 40
            print('x-ratelimit-remaining not found in API call: {}'.format(url))
if int(self._remainingRequests) < int(response.headers['x-ratelimit-remaining']):
self._RateLimitRefreshAt = datetime.now() + timedelta(seconds=60)
print('Reset time to: {}'.format(self._RateLimitRefreshAt))
self._remainingRequests = int(response.headers['x-ratelimit-remaining'])
if self._logging:
print('New API call at {}, returned with code {}'.format(datetime.now(), response.status_code))
if int(response.headers['x-ratelimit-remaining']) < 10:
print('Warning, you are reaching the API_rate_limit, {} calls left this minute'.format(
response.headers['x-ratelimit-remaining']))
if response.status_code == 401:
print('You are not authenticated for this API call')
return False
if response.text == '':
print("response was empty")
return ''
# print(response.text)
parsed_response = response.json()
if 'message' in parsed_response:
if parsed_response['message'] == 'Unauthenticated.':
raise PermissionError('Unauthenticated')
if parsed_response['message'] == "Too Many Attempts.":
raise ProcessLookupError("Too many requests attempted")
return parsed_response
def _make_request(self, path: str, method: str, parameters: dict = None,
headers: dict = None, data: dict = None) -> requests.Response:
parameters = {} if parameters is None else parameters
headers = {} if headers is None else headers
url = '{}{}'.format(self._host, path)
if method == 'GET':
return requests.get(
url, params=parameters, headers=headers, verify=True)
if method == 'PUT':
return requests.put(
url, data=parameters, headers=headers, verify=True)
if method == 'DELETE':
return requests.delete(
url, data=json.dumps(data), params=parameters, headers=headers, verify=True)
if method == 'POST':
return requests.post(
url, data=parameters, headers=headers, verify=True)
raise NotImplementedError("http method not implemented")
def refresh_token(self):
body = {'grant_type': 'refresh_token', 'refresh_token': self._refresh_token, 'client_id': self._client_id,
'client_secret': self._client_secret, 'scope': ''}
url = '{}oauth/token'.format(self._host)
response = requests.post(url, data=body)
if self._logging:
print('New login call at {}, returned with code {}'.format(datetime.now(), response.status_code))
if response.status_code != 200:
            print('Something went wrong when refreshing the access token')
self._authenticated = False
else:
now = datetime.now()
parsed_response = response.json()
self._access_token = parsed_response['access_token']
self._refresh_token = parsed_response['refresh_token']
self._expires_at = now + timedelta(seconds=parsed_response['expires_in'])
print('token successfully refreshed')
def projects_read(self):
""" List all your projects"""
path = 'projects'
return self._make_api_request(path, 'GET')
def project_create(self, licenseid: str, name: str, number: str = '', description: str = '', imageName: str = '',
imageExtension: str = '', imageData: str = '') -> dict:
""" Create a new project """
path = 'project'
if (imageExtension == ''):
project = {
"name": name,
"number": number,
"description": description,
"license": licenseid,
}
else:
project = {
"name": name,
"number": number,
"description": description,
"license": licenseid,
"image": {
"name": imageName,
"extension": imageExtension,
"data": imageData
}
}
return self._make_api_request(path, 'POST', project)
def project_read(self, project_id: str) -> dict:
""" Get project details """
path = 'project/{}'.format(project_id)
return self._make_api_request(path, 'GET')
def project_Update(self, project_id: str, name: str) -> dict:
""" Get project update """
path = 'project/{}'.format(project_id)
return self._make_api_request(path, 'PUT', {
"name": name
})
def project_delete(self, project_id: str) -> dict:
""" Get project delete """
path = 'project/{}'.format(project_id)
return self._make_api_request(path, 'DELETE')
def tables_read(self, project_id: str):
""" Get tables in a project """
path = 'tables'
return self._make_api_request(path, 'GET', {
"project[id]": project_id
})
def table_create(self, project_id: str, name: str) -> dict:
""" Create a table in a project """
path = 'table'
return self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"name": name
})
def table_read(self, project_id: str, table_id: str) -> dict:
""" Get details of a table in a project """
path = 'table/{}'.format(table_id)
return self._make_api_request(path, 'GET', {
"project[id]": project_id
})
def table_update(self, project_id: str, table_id: str, name: str) -> dict:
""" Update a table in a project """
path = 'table/{}'.format(table_id)
return self._make_api_request(path, 'PUT', {
"project": {"id": project_id},
"name": name
})
def table_delete(self, project_id: str, table_id: str) -> dict:
""" Delete a table in a project """
path = 'table/{}'.format(table_id)
return self._make_api_request(path, 'DELETE', {
"project[id]": project_id
})
def tables_query(self, project_id: str, queryBody):
""" Get all columns in a table """
path = 'project/{}/tables/query'.format(project_id)
return self._make_api_request(path, 'POST', queryBody)
def columns_read(self, project_id: str, table_id: str):
""" Get all columns in a table """
path = 'columns'
return self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id
})
def column_create(self, project_id: str, table_id: str, name: str,
fieldType: str, defaultValue: str = "",
options: list = None, required: bool = True, ordinal: int = "") -> dict:
""" Create a column in a table """
options = [] if options is None else options
path = 'column'
return self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"name": name,
"type": fieldType,
"options_value": options,
"default": defaultValue,
"required": required,
"ordinal": ordinal
})
def column_read(self, project_id: str, table_id: str, column_id):
""" Get details for a specific column in a table """
path = 'column/{}'.format(column_id)
return self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id
})
def column_update(self, project_id: str, table_id: str, column_id: str,
name: str, defaultValue: str = "",
options: list = None, required: bool = True, ordinal: int = 0) -> dict:
""" Update details for a specific column in a table """
path = 'column/{}'.format(column_id)
return self._make_api_request(path, 'PUT', {
"project": {"id": project_id},
"table": {"id": table_id},
"name": name,
"required": required,
"options": options,
"default": defaultValue,
"ordinal": ordinal
})
def column_delete(self,
project_id: str, table_id: str, column_id: str) -> dict:
""" Delete column in a table """
path = 'column/{}'.format(column_id)
return self._make_api_request(path, 'DELETE', {
"project[id]": project_id,
"table[id]": table_id
})
def records_create_csv(self, project_id: str, table_id: str,
records_csv: str, session: str = ""):
""" Import a csv file into a table """
path = 'records/import'
with codecs.open(records_csv, mode="r", encoding='utf-8') as csv_file:
encoded_csv = base64.b64encode(str.encode(csv_file.read()))
result = self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"session": {"id": session},
"records": encoded_csv.decode("utf-8")
})
return result
def records_create(self, project_id: str, table_id: str,
records: list, session: str = ""):
""" Create multiple records into a table """
path = 'records/import'
encoded_csv = base64.b64encode(self.create_virtual_csv(records).encode("utf-8"))
result = self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"session": {"id": session},
"records": encoded_csv.decode("utf-8")
})
return result
def records_import(self, project_id: str, table_id: str,
records: list, session: str = ""):
""" Create multiple records into a table """
path = 'records/import'
encoded_csv = base64.b64encode(self.create_virtual_csv_Addid(records).encode("utf-8"))
result = self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"session": {"id": session},
"records": encoded_csv.decode("utf-8")
})
return result
def records_read_chunk(self, project_id: str, table_id: str, limit: int = 0, offset: int = 0,
session: str = "") -> dict:
""" Get reords of table """
path = 'records'
record_data = self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id,
"filter[limit]": limit,
"filter[offset]": offset,
"filter[session]": session,
})
return record_data
def records_read(self, project_id: str, table_id: str, limit: int = 0, offset: int = 0, session: str = "",
chunk_size: int = 10000) -> dict:
""" Get reords of table """
record_data = self.records_read_chunk(project_id, table_id, chunk_size, offset, session)
if (limit == 0 or limit > chunk_size):
temp_limit = chunk_size
if len(record_data['records']) < temp_limit:
return record_data['records']
else:
if 'metadata' in record_data:
chunks = (record_data['metadata']['count'] - offset) // temp_limit
if self._logging:
print(
"Total table is bigger ({}) than chunksize({}), splitting up in: {} additional calls".format(
record_data['metadata']['count'] - offset, temp_limit, chunks))
all_records = record_data['records']
for i in range(1, chunks + 1):
temp_offset = offset + (i * temp_limit)
record_data = self.records_read_chunk(project_id, table_id, temp_limit, temp_offset, session)
if 'message' in record_data.keys():
print(record_data['message'])
else:
all_records = all_records + record_data['records']
return all_records
else:
temp_limit = limit
return record_data['records']
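    # Usage sketch (assumes an authenticated API instance named `api`; the ids
    # below are placeholders). records_read transparently splits large tables
    # into chunks of `chunk_size` and concatenates the returned records:
    #
    #     rows = api.records_read("project-uuid", "table-uuid", chunk_size=5000)
    #     for row in rows:
    #         print(row)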
def records_search(self, projectId: str, tableId: str, searchFields: list, searchPrase: str = "", offset: int = 0,
limit: int = 0, session: str = "", chunk_size: int = 10000):
"""Search in the records"""
body = {
"project": {"id": projectId},
"table": {"id": tableId},
"search": {"phrase": searchPrase},
"searchfields": searchFields,
"session": {"id": session},
}
record_data = self.search_chunk(body, chunk_size, offset)
if (limit == 0 or limit > chunk_size):
temp_limit = chunk_size
if len(record_data['records']) < temp_limit:
return record_data['records']
else:
if 'metadata' in record_data:
chunks = (record_data['metadata']['count'] - offset) // temp_limit
if self._logging:
print(
"Total table is bigger ({}) than chunksize({}), splitting up in: {} additional calls".format(
record_data['metadata']['count'] - offset, temp_limit, chunks))
all_records = record_data['records']
for i in range(1, chunks + 1):
temp_offset = offset + (i * temp_limit)
record_data = self.search_chunk(body, chunk_size, temp_offset)
all_records = all_records + record_data['records']
return all_records
else:
temp_limit = limit
return record_data['records']
def records_at_moment(self, projectId: str, tableId: str, timestamp: int, session: str = "", offset: int = 0,
limit: int = 0, chunk_size: int = 10000):
"""Search in the records"""
body = {
"project": {"id": projectId},
"table": {"id": tableId},
"timestamp": timestamp,
"session": {"id": session},
}
record_data = self.search_chunk(body, chunk_size, offset)
if (limit == 0 or limit > chunk_size):
temp_limit = chunk_size
if len(record_data['records']) < temp_limit:
return record_data['records']
else:
if 'metadata' in record_data:
chunks = (record_data['metadata']['count'] - offset) // temp_limit
if self._logging:
print(
"Total table is bigger ({}) than chunksize({}), splitting up in: {} additional calls".format(
record_data['metadata']['count'] - offset, temp_limit, chunks))
all_records = record_data['records']
for i in range(1, chunks + 1):
temp_offset = offset + (i * temp_limit)
record_data = self.search_chunk(body, chunk_size, temp_offset)
all_records = all_records + record_data['records']
return all_records
else:
temp_limit = limit
return record_data['records']
def records_search_exact(self, projectId: str, tableId: str, searchFields: list, searchExact: str = "",
limit: int = 0, offset: int = 0, session: str = "", chunk_size: int = 10000):
"""Search in the records"""
body = {
"project": {"id": projectId},
"table": {"id": tableId},
"search": {"exact": searchExact},
"searchfields": searchFields,
"session": {"id": session}
}
record_data = self.search_chunk(body, chunk_size, offset)
if (limit == 0 or limit > chunk_size):
temp_limit = chunk_size
if len(record_data['records']) < temp_limit:
return record_data['records']
else:
if 'metadata' in record_data:
chunks = (record_data['metadata']['count'] - offset) // temp_limit
if self._logging:
print(
"Total table is bigger ({}) than chunksize({}), splitting up in: {} additional calls".format(
record_data['metadata']['count'] - offset, temp_limit, chunks))
all_records = record_data['records']
for i in range(1, chunks + 1):
temp_offset = offset + (i * temp_limit)
record_data = self.search_chunk(body, chunk_size, temp_offset)
all_records = all_records + record_data['records']
return all_records
else:
temp_limit = limit
return record_data['records']
def records_search_by_range(self, projectId: str, tableId: str, searchFields: list, min: int = None,
max: int = None, limit: int = 0, offset: int = 0, session: str = "",
chunk_size: int = 10000):
"""Search in the records"""
search = {}
if min is not None and max is not None:
search = {"min": min, "max": max}
if min is not None and max is None:
search = {"min": min}
if max is not None and min is None:
search = {"max": max}
body = {
"project": {"id": projectId},
"table": {"id": tableId},
"search": search,
"searchfields": searchFields,
"session": {"id": session},
}
record_data = self.search_chunk(body, chunk_size, offset)
if (limit == 0 or limit > chunk_size):
temp_limit = chunk_size
if len(record_data['records']) < temp_limit:
return record_data['records']
else:
if 'metadata' in record_data:
chunks = (record_data['metadata']['count'] - offset) // temp_limit
if self._logging:
print(
"Total table is bigger ({}) than chunksize({}), splitting up in: {} additional calls".format(
record_data['metadata']['count'] - offset, temp_limit, chunks))
all_records = record_data['records']
for i in range(1, chunks + 1):
temp_offset = offset + (i * temp_limit)
record_data = self.search_chunk(body, chunk_size, temp_offset)
all_records = all_records + record_data['records']
return all_records
else:
temp_limit = limit
return record_data['records']
    def search_chunk(self, body, chunk_size, offset):
        body['filter'] = {"limit": chunk_size, "offset": offset}
# print(body)
return self._make_api_request('search', 'POST', body)
def records_by_revision(self, projectId: str, tableId: str, revisionId: str):
"""Get records of revision"""
path = 'search'
package = {
"project": {"id": projectId},
"table": {"id": tableId},
"revision": revisionId
}
return self._make_api_request(path, 'POST', package)
def records_delete(self, project_id: str, table_id: str,
records_ids: list) -> dict:
""" Delete records in table """
path = 'records'
data = {
"project": {
"id": project_id
},
"table": {
"id": table_id
},
"records": records_ids
}
return self._make_api_request(path, 'DELETE', delete_data=data)
def records_verify_csv(self, project_id: str, table_id: str, records_csv: str) -> dict:
""" Verify structure of CSV file against a table """
path = 'records/verify'
with codecs.open(records_csv, mode="r", encoding='utf-8') as csv_file:
encoded_csv = base64.b64encode(str.encode(csv_file.read()))
result = self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"records": encoded_csv.decode("utf-8")
})
return result
def records_verify(self, project_id: str, table_id: str, records: list) -> dict:
""" Verify structure of records against a table """
path = 'records/verify'
encoded_csv = base64.b64encode(self.create_virtual_csv(records).encode("utf-8"))
result = self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"records": encoded_csv.decode("utf-8")
})
return result
def record_create(self, project_id: str, table_id: str,
record_values: dict, session: str = "") -> dict:
""" Create a single record into a table """
path = 'record'
return self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"session": {"id": session},
"record": record_values
})
def record_read(self, project_id: str, table_id: str,
record_id: str) -> dict:
""" Read a specific record of a table """
path = 'record/{}'.format(record_id)
return self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id
})
def record_update(self, project_id: str, table_id: str, record_id: str,
updated_record_values: dict, session: str = "") -> dict:
""" Update a specific record of a table """
path = 'record/{}'.format(record_id)
return self._make_api_request(path, 'PUT', {
"project": {"id": project_id},
"table": {"id": table_id},
"session": {"id": session},
"record": updated_record_values
})
def record_delete(self, project_id: str, table_id: str,
record_id: str) -> dict:
""" Delete a specific record of a table """
path = 'record/{}'.format(record_id)
return self._make_api_request(path, 'DELETE', {
"project[id]": project_id,
"table[id]": table_id
})
def record_history(self, project_id: str, table_id: str,
record_id: str) -> dict:
""" Get change record history a specific record of a table """
path = 'record/history/{}'.format(record_id)
return self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id
})
def revisions_read(self, project_id: str, table_id: str) -> dict:
""" Get all revisions of a table """
path = 'revisions'
return self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id
})
def revision_create(self, project_id: str, table_id: str,
reason: str) -> dict:
""" Create a new revisions for a table """
path = 'revision'
return self._make_api_request(path, 'POST', {
"project": {"id": project_id},
"table": {"id": table_id},
"reason": reason,
"timestamp": time.time()
})
def revision_read(self, project_id: str, table_id: str,
revision_id: str) -> dict:
""" Get details of a revisions for a table """
path = 'revision/{}'.format(revision_id)
return self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id
})
def revision_update(self, project_id: str, table_id: str,
revision_id: str, reason: str) -> dict:
""" Update a revision for a table """
path = 'revision/{}'.format(revision_id)
return self._make_api_request(path, 'PUT', {
"project": {"id": project_id},
"table": {"id": table_id},
"reason": reason,
"timestamp": time.time()
})
    def revision_delete(self, project_id: str, table_id: str,
revision_id: str) -> dict:
""" Delete a revision for a table """
path = 'revision/{}'.format(revision_id)
return self._make_api_request(path, 'DELETE', {
"project[id]": project_id,
"table[id]": table_id
})
def upload_document(self, project_id: str, table_id: str, column_name: str, document_location,
document_title: str = None, session: str = ""):
""" Upload a document to a table. Creates a new record """
if document_title is None:
document_title = document_location.split("/")[-1]
ext = document_title.split(".")[-1]
path = 'record'
with open(document_location, "rb") as image_file:
encoded_file = base64.b64encode(image_file.read())
dataset = {
"project": {"id": project_id},
"table": {"id": table_id},
"record": {
column_name: {
"name": document_title,
"extension": ext,
"data": encoded_file.decode("utf-8")
}
},
"session": {"id": session}
}
res = self._make_api_request(path, 'POST', dataset)
if 'id' in res:
return res
else:
return "Error"
def attach_document(self, project_id: str, table_id: str, column_name: str, record_id: str, document_location,
document_title: str = None, session: str = ""):
""" Upload a document to an existing record. """
if document_title is None:
document_title = document_location.split("/")[-1]
ext = document_location.split(".")[-1]
path = 'record/{}'.format(record_id)
with open(document_location, "rb") as image_file:
encoded_file = base64.b64encode(image_file.read())
dataset = {
"project": {"id": project_id},
"table": {"id": table_id},
"record": {
column_name: {
"name": document_title,
"extension": ext,
"data": encoded_file.decode("utf-8")
}
},
"session": {"id": session}
}
# print(dataset)
res = self._make_api_request(path, 'PUT', dataset)
# print(res)
if 'id' in res:
return res
elif 'message' in res:
return res['message']
else:
return "Error"
def download_document(self, project_id: str, table_id: str, document_id: str, file_location: str,
file_name: str = None):
""" Download a document. Specify save location and filename """
path = 'record/document/{}'.format(document_id)
response = self._make_api_request(path, 'GET', {
"project[id]": project_id,
"table[id]": table_id
}, '')
if 'file' in response:
if file_name is None:
file_name = '{}.{}'.format(response['name'], response['extension'])
content = base64.b64decode(response['file'])
try:
file = open('{}/{}'.format(file_location, file_name), 'wb+')
file.write(content)
file.close()
except Exception as ex:
print('Error saving file: {}'.format(ex))
# tasks
## new -> license recommended
def tasks_read(self, license: str = "", project_id: str = "", status: str = "", user: str = "",
today: bool = False) -> list:
""" Get tasks"""
depreciationMessage("param", "status", "01-02-2023", "taken")
# license = selectLicense(license, self._licenses)
path = 'tasks?filter[license]={}&filter[today]={}'.format(license, int(today))
filters = []
if project_id != "":
filters.append({"column": "project", "operator": "=", "values": [project_id]})
if status != "":
filters.append({"column": "status", "operator": "=", "values": [status]})
if user != "":
filters.append({"column": "assigned_to", "operator": "=", "values": [user]})
return self._make_api_request(path, 'POST', {
"advanced_filters": filters
})
def task_create(self, project_id: str, name: str, description: str, status: str, due_date: str, assigned_user: str,
start_date: str = "", appendix: object = {}, license: str = "", end_date: str = "") -> dict:
""" Create a task in a project """
path = 'task-create'
license = selectLicense(license, self._licenses)
depreciationMessage("param", "status", "01-02-2023", "taken")
body = {
# required
"license": license,
"project": project_id,
"title": name,
"assigned_to": assigned_user,
"due": due_date,
# optional
"description": description,
"planned_start": start_date,
"planned_end": end_date,
}
if appendix != {}:
print("add appendix")
return self._make_api_request(path, 'POST', body)
def task_read(self, task_id: str) -> dict:
""" Get details of a task"""
path = 'tasks/{}'.format(task_id)
return self._make_api_request(path, 'GET', {})
def task_update_name(self, task_id: str, name: str) -> dict:
""" Update a task name"""
path = 'tasks/{}'.format(task_id)
return self._make_api_request(path, 'PUT', {
"title": name
})
def task_respond(self, task_id: str, response: str, assigned_user: str = "", status: str = "", due_date: str = "",
appendix: object = {}) -> dict:
""" Respond to a task"""
path = 'tasks/{}/message'.format(task_id)
# Depreciation messages -> Moved to update_task
if assigned_user != "":
depreciationMessage("param", "assigned_user", "01-03-2023", "taken")
            self.update_task(task_id, assigned_to=assigned_user)
if status != "":
# Status = closed?
depreciationMessage("param", "status", "01-02-2023", "taken")
# self.update_task(assigned_user=assigned_user)
if due_date != "":
depreciationMessage("param", "due_date", "01-03-2023", "taken")
            self.update_task(task_id, due_date=due_date)
if appendix != {}:
depreciationMessage("param", "due_date", "01-03-2023", "taken")
return self._make_api_request(path, 'POST', {"message": response})
def task_close(self, task_id: str):
path = 'tasks/{}/close'.format(task_id)
return self._make_api_request(path, 'POST')
def task_cancel(self, task_id: str):
path = 'tasks/{}/cancel'.format(task_id)
return self._make_api_request(path, 'POST')
def update_task(self, task_id: str, title: str = "", description: str = "", priority: str = "",
planned_start: str = "", planned_end: str = "", assigned_to: str = "", due_date: str = "",
sbs_code: str = "") -> dict:
""" update a task info"""
path = "tasks/{}".format(task_id)
body = {}
if title != "":
body['title'] = title
if description != "":
body['description'] = description
if priority != "":
body['priority'] = priority
if planned_start != "":
body['planned_start'] = planned_start
if planned_end != "":
body['planned_end'] = planned_end
if assigned_to != "":
body['assigned_to'] = assigned_to
if due_date != "":
body['due_date'] = due_date
if sbs_code != "":
body['sbs_code'] = sbs_code
return self._make_api_request(path, 'PUT', body)
def task_upload_appendix(self, task_id: str, appendix: dict):
path = "tasks/{}/appendixes".format(task_id)
return self._make_api_request(path, 'POST', appendix)
def task_message_create(self, task_id: str, message: str):
path = "tasks/{}/messages".format(task_id)
return self._make_api_request(path, 'POST', {"message": message})
def task_delete(self, task_id: str) -> dict:
""" Delete a task"""
path = 'tasks/{}'.format(task_id)
return self._make_api_request(path, 'DELETE', {})
def task_getJob(self, project_id: str, task_id: str) -> dict:
"""Get the job associated to the given task"""
path = 'project/{}/task/{}/job'.format(project_id, task_id)
return self._make_api_request(path, 'GET', {})
## CustomFunctions
def record_update_withdocument(self, project_id: str, table_id: str, record_id: str, updated_record_values: dict,
document_column_name: str, document_location, document_title: str = None) -> dict:
"""Update record with a document"""
path = 'record/{}'.format(record_id)
if document_title is None:
document_title = document_location.split("/")[-1]
ext = document_location.split(".")[-1]
with open(document_location, "rb") as image_file:
encoded_file = base64.b64encode(image_file.read())
updated_record_values[document_column_name] = {
"name": document_title,
"extension": ext,
"data": encoded_file.decode("utf-8")
}
return self._make_api_request(path, 'PUT', {
"project": {"id": project_id},
"table": {"id": table_id},
"record": updated_record_values
})
def create_virtual_csv(self, records: list):
"""Not for use. Create a virtual CSV of records"""
encoded_csv = ",".join(records[0].keys()) + "\n"
for record in records:
recs = []
for key in record.keys():
recs.append(record[key])
encoded_csv += "\"" + "\",\"".join(recs) + "\"\n"
return encoded_csv
def create_virtual_csv_Addid(self, records: list):
"""Not for use. Create a virtual CSV of records"""
encoded_csv = "id," + ",".join(records[0].keys()) + "\n"
for record in records:
recs = []
for key in record.keys():
recs.append(record[key])
encoded_csv += "," + ",".join(recs) + "\n"
return encoded_csv
def parse_document(self, documentLocation, documentTitle: str = None):
"""Parse a document to the ANT Format."""
if documentTitle is None:
documentTitle = documentLocation.split("/")[-1]
ext = documentTitle.split(".")[-1]
with open(documentLocation, "rb") as image_file:
encoded_file = base64.b64encode(image_file.read())
document = {
"name": documentTitle.replace(f'.{ext}', ''),
"extension": ext,
"data": encoded_file.decode('utf-8')
}
return document
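    # Usage sketch (assumes an authenticated `api` instance and a table with a
    # document-type column named "attachment"; all ids, paths and names below
    # are placeholders):
    #
    #     doc = api.parse_document("reports/summary.pdf")
    #     api.record_create("project-uuid", "table-uuid", {"attachment": doc})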
def parse_date(self, year: int, month: int, day: int, hour: int, minute: int, seconds: int):
"""Parse a date to the ANT Format."""
        date = "{}-{}-{} {}:{}:{}".format(year, month, day, hour, minute, seconds)
return date
def task_download(self, task_id: str, document_id: str, file_location: str, file_name: str = None):
""" Download a document. Specify save location and filename """
path = 'task/document/{}'.format(document_id)
response = self._make_api_request(path, 'GET', {"task[id]": task_id})
if 'file' in response[0]:
if file_name is None:
file_name = '{}.{}'.format(response[0]['name'], response[0]['extension'])
content = base64.b64decode(response[0]['file'])
try:
file = open('{}/{}'.format(file_location, file_name), 'wb+')
file.write(content)
file.close()
return True
except Exception as ex:
print('Error saving file: {}'.format(ex))
return False
def job_finish(self, project_id: str, job_id: str) -> dict:
""" Finish job (workflow task)"""
path = 'project/{}/job/{}/finish'.format(project_id, job_id)
return self._make_api_request(path, 'POST', {})
# SBS Codes
def sbs_codes(self, project_id: str) -> dict:
""" Get all SBS codes """
path = 'project/{}/sbs'.format(project_id)
return self._make_api_request(path, 'GET', {})
def sbs_getTree(self, project_id: str) -> dict:
""" Get SBS first objects in tree"""
path = 'project/{}/sbs-tree'.format(project_id)
return self._make_api_request(path, 'GET', {})
def sbs_search(self, project_id: str, value: str) -> dict:
""" Search sbs objects by code or label """
path = 'project/{}/sbs-search?value={}'.format(project_id, value)
return self._make_api_request(path, 'GET', {})
def sbs_addCode(self, project_id: str, code: str, parentCode: str = "", label: str = "") -> dict:
""" Add SBS Code """
path = 'project/{}/sbs'.format(project_id)
return self._make_api_request(path, 'POST', {
"code": code, "parent": parentCode, "label": label})
def sbs_updateParent(self, project_id: str, sbsId: str, parent: str) -> dict:
""" Update the parent of the SBSCode """
path = 'project/{}/sbs/{}'.format(project_id, sbsId)
return self._make_api_request(path, 'PUT', {
"parent": parent})
def sbs_updateLabel(self, project_id: str, sbsId: str, label: str) -> dict:
""" Update the label of the SBS Code """
path = 'project/{}/sbs/{}'.format(project_id, sbsId)
return self._make_api_request(path, 'PUT', {
"label": label})
def sbs_removeCode(self, project_id: str, sbsId: str) -> dict:
""" Remove the SBSCode """
path = 'project/{}/sbs/{}'.format(project_id, sbsId)
return self._make_api_request(path, 'DELETE', {})
def sbs_import(self, project_id: str, records: list) -> dict:
""" Create multiple sbs records into a table """
path = 'project/{}/sbs-import'.format(project_id)
encoded_csv = base64.b64encode(self.create_virtual_csv_Addid(records).encode("utf-8"))
result = self._make_api_request(path, 'POST', {
"records": encoded_csv.decode("utf-8")
})
return result
def sbs_children(self, project_id: str, sbs_id: str) -> dict:
""" Get SBS Object children """
path = 'project/{}/sbs/{}/children'.format(project_id, sbs_id)
return self._make_api_request(path, 'GET', {})
def sbs_multi_delete(self, project_id: str, records: list) -> dict:
""" Delete multiple sbs records from table """
path = 'project/{}/sbs'.format(project_id)
body = {
"records": records
}
result = self._make_api_request(path, 'DELETE', delete_data=body)
return result
# WorkFlows
def project_workflows(self, project_id: str) -> dict:
""" Get all workflows in project"""
path = "project/{}/workflows".format(project_id)
return self._make_api_request(path, 'GET', {})
def project_workflows_inLicense(self, project_id: str) -> dict:
"""Returns the workflows which are in project license"""
path = "project/{}/workflows/inLicense".format(project_id)
return self._make_api_request(path, 'GET', {})
def project_workflow_details(self, project_id: str, projectWorkflowId: str) -> dict:
"""Returns the project workflow relation"""
path = "project/{}/workflow/{}".format(project_id, projectWorkflowId)
return self._make_api_request(path, 'GET', {})
def project_workflow_add(self, project_id: str, workflow_id: str, name: str) -> dict:
"""Adds a project workflow relation"""
path = "project{}/wokrflow"
body = {
"project": {"id": project_id},
"workflow": {"id": workflow_id},
"name": "name"
}
return self._make_api_request(path, 'POST', body)
def project_workflow_delete(self, project_id: str, workflow_id: str) -> dict:
"""Delete a workflow from a project"""
path = "project/{}/workflow/{}"
return self._make_api_request(path, 'DELETE', '')
# Sessions
def project_sessions(self, project_id: str, sbsId: str) -> dict:
""" Get Project Sessions """
path = 'project/{}/sessions'.format(project_id)
return self._make_api_request(path, 'GET', {})
def workflow_sessions(self, project_id: str, session_id: str) -> dict:
""" Workflow sessions """
path = 'project/{}/sessions/{}'.format(project_id, session_id)
return self._make_api_request(path, 'GET', {})
def workflow_createSession(self, project_id: str, workflow_id: str, name: str, sbs_code: str = "") -> dict:
""" Workflow sessions """
path = 'project/{}/session'.format(project_id)
return self._make_api_request(path, 'POST', {"name": name, "workflow": workflow_id, "sbs_code": sbs_code})
def workflow_sessionUpdateName(self, project_id: str, session_id: str, name: str) -> dict:
""" Workflow sessions """
path = 'project/{}/session/{}'.format(project_id, session_id)
return self._make_api_request(path, 'PUT', {"name": name})
def workflow_sessionUpdateSBS(self, project_id: str, session_id: str, sbs_code: str) -> dict:
""" Workflow sessions """
path = 'project/{}/session/{}'.format(project_id, session_id)
return self._make_api_request(path, 'PUT', {"sbs_code": sbs_code})
def workflow_sessionDelete(self, project_id: str, session_id: str) -> dict:
""" Workflow sessions """
path = 'project/{}/session/{}'.format(project_id, session_id)
return self._make_api_request(path, 'DELETE', {})
# UTILS
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKCYAN = '\033[96m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def link(uri, label=None):
""" Private function"""
if label is None:
label = uri
parameters = ''
# OSC 8 ; params ; URI ST <name> OSC 8 ;; ST
escape_mask = '\033]8;{};{}\033\\{}\033]8;;\033\\'
return escape_mask.format(parameters, uri, bcolors.OKBLUE + label + bcolors.ENDC)
def selectLicense(license, licenses):
if license == "":
selectedLicenseName = licenses[0]["name"]
URL = link("https://docs.antcde.io/antconnect/python/#taken", "documentation")
if len(licenses) > 1:
print(
bcolors.WARNING + "Warning" + bcolors.ENDC + ": No license provided. A random license (" + bcolors.OKGREEN + "{}".format(
selectedLicenseName) + bcolors.ENDC + ") is selected to handle the tasks. Please specify which license you are using, see {}".format(
URL))
else:
print(
bcolors.OKBLUE + "Info" + bcolors.ENDC + ": You didn't provided a license. Since you have only one license (" + bcolors.OKGREEN + "{}".format(
selectedLicenseName) + bcolors.ENDC + "), this is automatically selected. Advised to add it, see {} ".format(
URL))
return licenses[0]
else:
return license
def depreciationMessage(type, name, date, doc):
URL = link("https://docs.antcde.io/antconnect/python/#{}".format(doc), "documentation")
if type == "param":
print(
bcolors.WARNING + "Warning" + bcolors.ENDC + ": The parameter: \"{}\" will be been depreciated from {}. Please update according to {}".format(
name, date, URL))
if type == "def":
print(
bcolors.WARNING + "Warning" + bcolors.ENDC + ": The function {} will be been depreciated from {}. Please update according to {}".format(
name, date, URL))
# Monitor the use of deprecated functions?
# If depreciation date is soon, inform? | ANTConnect | /ANTConnect-2023.9.3.tar.gz/ANTConnect-2023.9.3/antconnect/api.py | api.py |
============
ANYstructure
============
Save cost and time by efficient optimization and reporting!
ANYstructure is the ultimate steel structure design tool for plate fields and cylinders!
Weight optimization for all structures with machine learning capabilities.
Calculations are based on DNV standards, rules and recommended practices.
The following is calculated:
* Minimum plate thickness (DNV-OS-C101)
* Minimum section modulus of stiffener/plate (DNVGL-OS-C101)
* Minimum shear area (DNVGL-OS-C101)
* Buckling (DNVGL-RP-C201) or PULS (licensed DNV software)
* Buckling strength of shells DNV-RP-C202
* PULS buckling (DNV license needed)
* Machine learning buckling, PULS based
* Fatigue for plate/stiffener connection (DNVGL-RP-C203)
Loads are defined as follows:
* External surface loads defined by polynomial equations
* Tank loads are calculated automatically
Loads are combined according to DNVGL-OS-C101.
Installation
------------
The easiest way to install the package is via pip::
$ pip install anystructure
Usage
-----
An entry point is defined. After installing via pip, just type "ANYstructure" in the command window.
Alternatively run \_\_main\_\_.py
Documentation
-------------
Documentation is contained in the tool: Help -> Open documentation.
Website
-------------
https://sites.google.com/view/anystructure/start
| ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/README.rst | README.rst |
# ANYstructure #
ANYstructure is the ultimate steel structure design tool for plate fields and cylinders!
Weight optimization for all structures with machine learning capabilities.
Calculations are based on DNV standards and rules
### What's new in 4.9.1 ###
* Corrected bug in loading old save files
* Corrected error in flat plate buckling calculation
### What's new in 4.8 ###
* Reporting table on cylinders.
* Color coding on some cylinder properties.
* Corrected error on additional hoop stress input for cylinders.
### What's new in 4.7 ###
* Corrected error on girder calculation for cylinder buckling.
* Added 1.10 load factor option for cylinder buckling.
* Better compatibility with Linux.
* Python 3.11 based.
### What's new in 4.4 ###
* Backup and restore feature added.
### What's new in 4.3 ###
* General stability.
* User friendliness.
### What's new in 4.2 ###
* Bug fixing.
* Ukraininan theme.
### What's new in 4.0 ###
* Cylinder design and optimization!
* Flat plate prescriptive buckling improved. Girder calculation added.
* Updated GUI with color themes.
### What's new in 3.3 ###
* Extremely efficient Machine Learning version of PULS called ML-CL. Implemented for all optimizer options.
* Calculation of Center of Gravity and Center of Buoyancy.
* Reporting of weights and COG.
* Lots of bug fixes.
------------------------------------------------------------------------
## The following is calculated: ##
* Minimum plate thickness (DNV-OS-C101)
* Minimum section modulus of stiffener/plate (DNVGL-OS-C101)
* Minimum shear area (DNVGL-OS-C101)
* Buckling (DNVGL-RP-C201) or PULS (licensed DNV software)
* Buckling strength of shells DNV-RP-C202
* PULS buckling (DNV license needed)
* Machine learning buckling, PULS based
* Fatigue for plate/stiffener connection (DNVGL-RP-C203)
Compartments (tank pressures) are created automatically.
Pressures on the external hull (or any other generic location) are defined by specifying equations.
You can optimize cylinders, a single plate/stiffener field or multiple fields. The geometry of a double bottom can also be optimized.
PLEASE CONTRIBUTE. REPORT BUGS, ERRORS ETC.
For the Windows executable (.exe) version for non-coders, use the link below.
Feedback: audunarn@gmail.com or discuss on github.
Please like, share or comment on LinkedIn: https://www.linkedin.com/in/audun-arnesen-nyhus-6aa17118/
Screenshot (this example can be loaded from file "ship_section_example.txt"):

| ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/README.md | README.md |
import tkinter as tk
from _tkinter import TclError
import os
try:
import any_files.example_data as test
except ModuleNotFoundError:
import ANYstructure.any_files.example_data as test
class CreateStressesWindow():
'''
This class initiates the GUI used to define stresses for the selected structure.
'''
def __init__(self, master, app=None):
super(CreateStressesWindow, self).__init__()
if __name__ == '__main__':
self._initial_structure_obj = test.get_structure_object()
self.default_stresses = test.get_default_stresses()
image_dir = os.path.dirname(__file__) + '\\images\\'
else:
self.app = app
try:
self._initial_structure_obj = app._line_to_struc[app._active_line][0]
except KeyError:
self._initial_structure_obj = None
self.default_stresses = app._default_stresses
image_dir = app._root_dir + '\\images\\'
self._frame = master
self._frame.wm_title("Specify strucutre - returned to input field in main window")
self._frame.geometry('1500x900')
self._frame.grab_set()
self._opt_runned = False
self._opt_resutls = ()
self._draw_scale = 500
self._canvas_dim = (500, 450)
tk.Label(self._frame, text='-- Global stresses and fixation parameter in plate/stiffener --',
font='Verdana 15 bold').place(x=10, y=10)
ent_w = 10
# stresses in plate and stiffener
self._new_structure_type = tk.StringVar()
self._new_trans_stress_high = tk.DoubleVar()
self._new_trans_stress_low = tk.DoubleVar()
self._new_axial_stress_1 = tk.DoubleVar()
self._new_axial_stress_2 = tk.DoubleVar()
self._new_shear_stress = tk.DoubleVar()
self._new_km1 = tk.DoubleVar()
self._new_km2 = tk.DoubleVar()
self._new_km3 = tk.DoubleVar()
self._new_kpp = tk.DoubleVar()
self._new_kps = tk.DoubleVar()
self._new_max_pressure_side = tk.StringVar()
self._ent_structure_type = tk.OptionMenu(self._frame,self._new_structure_type,command=self.change_option_menu,
*self.default_stresses.keys())
self._ent_trans_stress_high = tk.Entry(self._frame, textvariable=self._new_trans_stress_high, width=ent_w)
self._ent_trans_stress_low = tk.Entry(self._frame, textvariable=self._new_trans_stress_low, width=ent_w)
self._ent_axial_stress_1 = tk.Entry(self._frame, textvariable=self._new_axial_stress_1, width=ent_w)
self._ent_axial_stress_2 = tk.Entry(self._frame, textvariable=self._new_axial_stress_2, width=ent_w)
self._ent_shear_stress = tk.Entry(self._frame, textvariable=self._new_shear_stress, width=ent_w)
self._ent_km1 = tk.Entry(self._frame, textvariable=self._new_km1, width=ent_w)
self._ent_km2 = tk.Entry(self._frame, textvariable=self._new_km2, width=ent_w)
self._ent_km3 = tk.Entry(self._frame, textvariable=self._new_km3, width=ent_w)
self._ent_kpp = tk.Entry(self._frame, textvariable=self._new_kpp, width=ent_w)
self._ent_kps = tk.Entry(self._frame, textvariable=self._new_kps, width=ent_w)
self._ent_pressure_side = tk.OptionMenu(self._frame,self._new_max_pressure_side,*('p','s'))
start_x,start_y,dx,dy = 20,100,100,35
###
# tk.Label(self._frame, text='Input stresses and parameters:', font='Verdana 12 bold',fg='red') \
# .place(x=start_x , y=start_y + 9 * dy)
        tk.Label(self._frame, text='Select structure type:', font='Verdana 9',fg='red') \
.place(x=start_x , y=start_y + 10 * dy)
tk.Label(self._frame, text='Sigma,y1_Sd - large transversal stress', font='Verdana 9') \
.place(x=start_x , y=start_y + 11 * dy)
tk.Label(self._frame, text='[MPa]', font='Verdana 9 bold') \
.place(x=start_x + dx * 4, y=start_y + 11 * dy)
tk.Label(self._frame, text='Sigma,y2_Sd - small transversal stress', font='Verdana 9') \
.place(x=start_x, y=start_y + 12* dy)
tk.Label(self._frame, text='[MPa]', font='Verdana 9 bold') \
.place(x=start_x + dx * 4, y=start_y + 12 * dy)
tk.Label(self._frame, text='Sigma,x_Sd - axial stress 1', font='Verdana 9') \
.place(x=start_x, y=start_y + 13 * dy)
tk.Label(self._frame, text='[MPa]', font='Verdana 9 bold') \
.place(x=start_x + dx * 4, y=start_y + 13 * dy)
tk.Label(self._frame, text='Sigma,x_Sd - axial stress 2', font='Verdana 9') \
.place(x=start_x, y=start_y + 14 * dy)
tk.Label(self._frame, text='[MPa]', font='Verdana 9 bold') \
.place(x=start_x + dx * 4, y=start_y + 14 * dy)
tk.Label(self._frame, text='Tau,xy - shear stress', font='Verdana 9') \
.place(x=start_x, y=start_y + 15 * dy)
tk.Label(self._frame, text='[MPa]', font='Verdana 9 bold') \
.place(x=start_x + dx * 4, y=start_y + 15 * dy)
tk.Label(self._frame, text='km1, bending moment factor', font='Verdana 9') \
.place(x=start_x, y=start_y + 16* dy)
tk.Label(self._frame, text='km2, bending moment factor', font='Verdana 9') \
.place(x=start_x, y=start_y + 17 * dy)
tk.Label(self._frame, text='km3, bending moment factor', font='Verdana 9') \
.place(x=start_x, y=start_y + 18 * dy)
tk.Label(self._frame, text='kpp, fixation parameter plate', font='Verdana 9') \
.place(x=start_x, y=start_y + 19 * dy)
tk.Label(self._frame, text='kps, fixation parameter stiffener', font='Verdana 9') \
.place(x=start_x, y=start_y + 20 * dy)
        tk.Label(self._frame, text='Max pressure side (plate or stiffener)', font='Verdana 9 bold') \
.place(x=start_x+5*dx, y=start_y + 8 * dy)
self._ent_structure_type.place(x=start_x + dx * 3, y=start_y + 10 * dy)
self._ent_trans_stress_high.place(x=start_x + dx * 3, y=start_y + 11 * dy)
self._ent_trans_stress_low.place(x=start_x + dx * 3, y=start_y + 12 * dy)
self._ent_axial_stress_1.place(x=start_x + dx * 3, y=start_y + 13 * dy)
self._ent_axial_stress_2.place(x=start_x + dx * 3, y=start_y + 14 * dy)
self._ent_shear_stress.place(x=start_x + dx * 3, y=start_y + 15 * dy)
self._ent_km1.place(x=start_x + dx * 3, y=start_y + 16 * dy)
self._ent_km2.place(x=start_x + dx * 3, y=start_y + 17 * dy)
self._ent_km3.place(x=start_x + dx * 3, y=start_y + 18 * dy)
self._ent_kpp.place(x=start_x + dx * 3, y=start_y + 19 * dy)
self._ent_kps.place(x=start_x + dx * 3, y=start_y + 20 * dy)
self._ent_pressure_side.place(x=start_x+8*dx, y=start_y + 8 * dy)
# setting default values
init_dim = 0.05
init_thk = 0.002
if self._initial_structure_obj != None:
self._new_trans_stress_high.set(self._initial_structure_obj.Plate.get_sigma_y1())
self._new_trans_stress_low.set(self._initial_structure_obj.Plate.get_sigma_y2())
self._new_axial_stress_1.set(self._initial_structure_obj.Plate.get_sigma_x1())
self._new_axial_stress_2.set(self._initial_structure_obj.Plate.get_sigma_x2())
self._new_shear_stress.set(self._initial_structure_obj.Plate.get_tau_xy())
self._new_km1.set(self._initial_structure_obj.Plate.get_km1())
self._new_km2.set(self._initial_structure_obj.Plate.get_km2())
self._new_km3.set(self._initial_structure_obj.Plate.get_km3())
self._new_kpp.set(self._initial_structure_obj.Plate.get_kpp())
self._new_kps.set(self._initial_structure_obj.Plate.get_kps())
self._new_structure_type.set(self._initial_structure_obj.Plate.get_structure_type())
else:
self._new_structure_type.set('GENERAL_INTERNAL_WT')
self._new_trans_stress_high.set(self.default_stresses[self._new_structure_type.get()][0])
self._new_trans_stress_low.set(self.default_stresses[self._new_structure_type.get()][1])
self._new_axial_stress_1.set(self.default_stresses[self._new_structure_type.get()][2])
            self._new_axial_stress_2.set(self.default_stresses[self._new_structure_type.get()][3])
self._new_shear_stress.set(self.default_stresses[self._new_structure_type.get()][4])
self._new_km1.set(12)
self._new_km2.set(24)
self._new_km3.set(12)
self._new_kpp.set(1)
self._new_kps.set(1)
self._new_max_pressure_side.set('p')
try:
img_file_name = 'img_transverse_stress.gif'
if os.path.isfile('images/' + img_file_name):
file_path ='images/' + img_file_name
else:
file_path = app._root_dir + '/images/' + img_file_name
photo_transverse = tk.PhotoImage(file=file_path)
label_trans = tk.Label(self._frame, image=photo_transverse)
label_trans.image = photo_transverse # keep a reference!
label_trans.place(x=start_x, y=60)
except TclError:
pass
try:
img_file_name = "img_axial_stresses.gif"
if os.path.isfile('images/' + img_file_name):
file_path ='images/' + img_file_name
else:
file_path = app._root_dir + '/images/' + img_file_name
photo_axial = tk.PhotoImage(file=file_path)
label_axial = tk.Label(self._frame, image=photo_axial)
label_axial.image = photo_axial # keep a reference!
label_axial.place(x=start_x+5*dx, y=60)
except TclError:
pass
try:
img_file_name = 'img_fixation_parameters.gif'
if os.path.isfile('images/' + img_file_name):
file_path ='images/' + img_file_name
else:
file_path = app._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
label_fix = tk.Label(self._frame, image=photo)
label_fix.image = photo # keep a reference!
label_fix.place(x=start_x+9.5*dx, y=60)
except TclError:
pass
        tk.Label(self._frame,text='The stresses are global values and are estimated '
                                  '\nby the user.\n'
                                  'Alternatively, read out stresses from the FE-model.\n'
'Suggestions for input:\n'
'Transverse stresses (Sigma,y_Sd is calculated):\n'
' - conservative - about 100 MPa \n'
' - non-conservative - about 60 MPa\n'
'Axial stresses: \n'
' - about 60 MPa\n'
' - non-conservative - about 40 MPa\n'
'Shear stresses: \n'
' - about 20 MPa\n'
' - non-conservative - about 1 MPa', justify=tk.LEFT,
font = 'Verdana 10', fg = 'blue',bg='white')\
.place(x=start_x+dx*4.5,y=start_y+dy*11)
self._close_and_save = tk.Button(self._frame, text='Return and set stresses and fixation parameter',
command=self.save_and_close, bg='green', font='Verdana 10', fg='yellow')
self._close_and_save.place(x=start_x + dx * 4.5, y=start_y + dy * 19)
def change_option_menu(self,event):
'''
Action when changing the structure type
:return:
'''
self._new_trans_stress_high.set(self.default_stresses[self._new_structure_type.get()][0])
self._new_trans_stress_low.set(self.default_stresses[self._new_structure_type.get()][1])
self._new_axial_stress_1.set(self.default_stresses[self._new_structure_type.get()][2])
        self._new_axial_stress_2.set(self.default_stresses[self._new_structure_type.get()][3])
self._new_shear_stress.set(self.default_stresses[self._new_structure_type.get()][4])
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
self.app.on_close_stresses_window([self._new_trans_stress_high.get(),
self._new_trans_stress_low.get(),
self._new_axial_stress_1.get(),
self._new_axial_stress_2.get(),
self._new_shear_stress.get(),
self._new_km1.get(),
self._new_km2.get(),
self._new_km3.get(),
self._new_kpp.get(),
self._new_kps.get(),
self._new_structure_type.get(),
self._new_max_pressure_side.get()])
self._frame.destroy()
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateStressesWindow(root,app=None)
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/stresses_window.py | stresses_window.py |
try:
import any_files.calc_loads as load
import any_files.calc_structure as calc_structure
import any_files.make_grid_numpy as grid
except ModuleNotFoundError:
import ANYstructure.any_files.calc_loads as load
import ANYstructure.any_files.calc_structure as calc_structure
import ANYstructure.any_files.make_grid_numpy as grid
import random
structure_types = {'vertical': ['BBS', 'SIDE_SHELL', 'SSS'],
'horizontal': ['BOTTOM', 'BBT', 'HOPPER', 'MD'],
'non-wt': ['FRAME', 'GENERAL_INTERNAL_NONWT'],
'internals': ['INNER_SIDE', 'FRAME_WT', 'GENERAL_INTERNAL_WT',
'INTERNAL_ZERO_STRESS_WT', 'INTERNAL_LOW_STRESS_WT']}
obj_dict = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.10, ''],'span': [3.3, 'm'], 'spacing': [0.68, 'm'],
'plate_thk': [0.025, 'm'],
'stf_web_height': [0.250297358, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.052, 'm'],
'stf_flange_thk': [0.029702642, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [100, 'MPa'], 'sigma_y2': [100, 'MPa'], 'sigma_x2': [102.7, 'MPa'], 'sigma_x1': [102.7, 'MPa'],
'tau_xy': [5, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[1,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''], 'panel or shell': ['panel', ''],
'pressure side': ['both sides', ''], 'girder_lg': [5, 'm']}
obj_dict_cyl_long = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [5, 'm'], 'spacing': [0.6, 'm'],
'plate_thk': [0.015, 'm'],
'stf_web_height': [0.38, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] , 'panel or shell': ['shell', ''] }
obj_dict_cyl_ring = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [5, 'm'], 'spacing': [0.6, 'm'],
'plate_thk': [0.015, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.046, 'm'],
'stf_flange_thk': [0.024957, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['L-bulb', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] , 'panel or shell': ['shell', ''] }
obj_dict_cyl_heavy_ring = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [5, 'm'], 'spacing': [0.6, 'm'],
'plate_thk': [0.015, 'm'],
'stf_web_height': [0.77, 'm'], 'stf_web_thk': [0.014, 'm'], 'stf_flange_width': [0.2, 'm'],
'stf_flange_thk': [0.03, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['L-bulb', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] , 'panel or shell': ['shell', ''] }
obj_dict_cyl_long2 = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [5, 'm'], 'spacing': [0.65, 'm'],
'plate_thk': [0.02, 'm'],
'stf_web_height': [0.24-0.0249572753957594, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.046, 'm'],
'stf_flange_thk': [0.0249572753957594, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['L-bulb', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] , 'panel or shell': ['shell', ''] }
obj_dict_cyl_ring2 = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [5, 'm'], 'spacing': [0.7, 'm'],
'plate_thk': [0.020, 'm'],
'stf_web_height': [0.3, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.12, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] , 'panel or shell': ['shell', ''] }
obj_dict_cyl_heavy_ring2 = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [5, 'm'], 'spacing': [0.6, 'm'],
'plate_thk': [0.015, 'm'],
'stf_web_height': [0.7, 'm'], 'stf_web_thk': [0.016, 'm'], 'stf_flange_width': [0.2, 'm'],
'stf_flange_thk': [0.03, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] , 'panel or shell': ['shell', ''] }
obj_dict_heavy = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [3700, 'm'], 'spacing': [0.75, 'm'],
'plate_thk': [0.018, 'm'],
'stf_web_height': [0.500, 'm'], 'stf_web_thk': [0.0120, 'm'], 'stf_flange_width': [0.150, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''], 'panel or shell': ['panel', ''],
'pressure side': ['both sides', '']}
obj_dict2 = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [4, 'm'], 'spacing': [0.7, 'm'],
'plate_thk': [0.018, 'm'],
'stf_web_height': [0.36, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [100, 'MPa'], 'sigma_y2': [100, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [50, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] }
obj_dict_sec_error = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [3.5, 'm'], 'spacing': [0.875, 'm'],
'plate_thk': [0.023, 'm'],
'stf_web_height': [0.41, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.17, 'm'],
'stf_flange_thk': [0.015, 'm'], 'structure_type': ['SIDE_SHELL', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [93, 'MPa'], 'sigma_y2': [93, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [39.7, 'MPa'], 'tau_xy': [2.8, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] }
obj_dict_L = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''], 'span': [3.6, 'm'], 'spacing': [0.82, 'm'],
'plate_thk': [0.018, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.014, 'm'], 'stf_flange_width': [0.072, 'm'],
'stf_flange_thk': [0.0439, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [0.5, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [102, 'MPa'], 'sigma_y2': [106.9, 'MPa'], 'sigma_x2': [66.8, 'MPa'], 'sigma_x1': [66.8, 'MPa'], 'tau_xy': [20, 'MPa'],
'stf_type': ['L', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''] , 'panel or shell': ['panel', ''], 'pressure side': ['both sides', ''] }
obj_dict_fr = {'mat_yield': [355e6, 'Pa'], 'mat_factor': [1.15, ''],'span': [2.5, 'm'], 'spacing': [0.74, 'm'],
'plate_thk': [0.018, 'm'],
'stf_web_height': [0.2, 'm'], 'stf_web_thk': [0.018, 'm'], 'stf_flange_width': [0, 'm'],
'stf_flange_thk': [0, 'm'], 'structure_type': ['FRAME', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [150, 'MPa'], 'sigma_y2': [92.22, 'MPa'], 'sigma_x2': [-54.566, 'MPa'], 'sigma_x1': [-54.566, 'MPa'], 'tau_xy': [16.67, 'MPa'],
'stf_type': ['FB', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''],
'puls buckling method':[2,''], 'puls boundary':['Int',''], 'puls stiffener end':['C',''],
'puls sp or up':['SP',''], 'puls up boundary' :['SSSS',''], 'panel or shell': ['panel', ''], 'pressure side': ['both sides', ''] }
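# The obj_dict_* examples above store every input as a [value, unit] pair, e.g.
# 'mat_yield': [355e6, 'Pa'] or 'sigma_y1': [80, 'MPa']. The helper below is only
# an illustrative sketch (not part of the ANYstructure API) of how such a
# dictionary could be flattened to plain SI floats for quick hand checks.
def _example_to_si(prop_dict):
    """Illustrative only: return {key: value in SI units} for [value, unit] pairs."""
    scale = {'Pa': 1.0, 'MPa': 1e6, 'm': 1.0, '': 1.0}
    out = {}
    for key, val in prop_dict.items():
        if (isinstance(val, list) and len(val) == 2
                and isinstance(val[0], (int, float)) and val[1] in scale):
            out[key] = val[0] * scale[val[1]]
    return out

# Example (illustrative): _example_to_si(obj_dict2)['sigma_y1'] -> 100e6 (Pa)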
point_dict = {'point5': [12.0, 2.5], 'point8': [0.0, 2.5], 'point3': [8.0, 0.0], 'point2': [4.0, 0.0],
'point6': [8.0, 2.5], 'point7': [4.0, 2.5], 'point9': [0.0, 20.0], 'point4': [12.0, 0.0],
'point10': [12.0, 20.0], 'point1': [0.0, 0.0]}
line_dict = {'line8': [9, 8], 'line6': [7, 6], 'line12': [2, 7], 'line3': [3, 4], 'line13': [3, 6], 'line1': [1, 2],
'line10': [5, 10], 'line11': [1, 8], 'line7': [7, 8], 'line9': [9, 10], 'line5': [5, 6],
'line4': [5, 4], 'line2': [3, 2]}
opt_frames = {'opt_frame1': [[2.4, 0.0], [2.4, 2.5]], 'opt_frame2': [[4.8, 0.0], [4.8, 2.5]],
'opt_frame3': [[7.2, 0.0], [7.2, 2.5]], 'opt_frame4': [[9.6, 0.0], [9.6, 2.5]],
'opt_frame_start': [[0.0, 0.0], [0.0, 2.5]], 'opt_frame_stop': [[12.0, 0.0], [12.0, 2.5]]}
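# point_dict holds node coordinates [x, y] (apparently in metres) and line_dict
# maps each line to the two point numbers it connects, 1-based, so 'line1': [1, 2]
# joins 'point1' and 'point2'. A minimal sketch (not the ANYstructure API) of
# recovering a line length from the two dictionaries:
import math

def _example_line_length(line_name, lines=line_dict, points=point_dict):
    """Illustrative only: Euclidean length of a line given by two point indices."""
    p1, p2 = ('point%d' % idx for idx in lines[line_name])
    (x1, y1), (x2, y2) = points[p1], points[p2]
    return math.hypot(x2 - x1, y2 - y1)

# Example (illustrative): _example_line_length('line1') -> 4.0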
fat_obj_dict = {'SN-curve': 'Ec','SCF': 1,'Design life': 20, 'n0':10000, 'Weibull': (0.8, 0.8, 0.8),
'Period': (9, 9, 9), 'Fraction': (1, 0, 0), 'CorrLoc': (0.5, 0.5, 0.5),
'Order': ('Loaded', 'Ballast', 'Part'), 'Accelerations':(0.5, 0.5, 0.5), 'DFF':2}
fat_obj_dict2 = {'SN-curve': 'Ec','SCF': 1,'Design life': 20, 'n0':10000, 'Weibull': (0.8, 0.8, 0.8),
'Period': (9, 9, 9), 'Fraction': (1, 0, 0), 'CorrLoc': (0.5, 0.5, 0.5),
'Order': ('Loaded', 'Ballast', 'Part'), 'Accelerations':(0.5, 0.5, 0.5), 'DFF':2}
fat_obj_dict_problematic = {'SN-curve': 'Ec','SCF': 1,'Design life': 20, 'n0':500571428.0, 'Weibull': (0.8, 0.8, 0),
'Period': (8, 8, 0), 'Fraction': (0.5, 0.5, 0), 'CorrLoc': (0.5, 0.5, 0),
'Order': ('Loaded', 'Ballast', 'Part'), 'Accelerations':(0.5, 0.5, 0), 'DFF':2}
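# The fat_obj_dict* examples collect fatigue input: an S-N curve name ('Ec'), a
# stress concentration factor (SCF), design life (presumably years), n0 cycles
# and a design fatigue factor (DFF). The three-element tuples ('Weibull',
# 'Period', 'Fraction', 'CorrLoc', 'Accelerations') appear to be given per
# loading condition in the order stated by the 'Order' key, i.e.
# (Loaded, Ballast, Part).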
loa_fls = {'static_draft':None,'poly_third':1,'poly_second':50,'poly_first':10,'poly_const':5000,'man_press':0,
'load_condition':'loaded','name_of_load':'test_load_loaded_FLS','limit_state':'FLS'}
loa_uls = {'static_draft':None,'poly_third':2,'poly_second':20,'poly_first':20,'poly_const':2000,'man_press':0,
'load_condition':'loaded','name_of_load':'test_load_loaded_ULS','limit_state':'ULS'}
bal_fls = {'static_draft':None,'poly_third':5.5,'poly_second':10,'poly_first':5.5,'poly_const':1000,'man_press':0,
'load_condition':'ballast','name_of_load':'test_load_ballast_FLS','limit_state':'FLS'}
bal_uls = {'static_draft':None,'poly_third':2,'poly_second':20,'poly_first':20,'poly_const':2000,'man_press':0,
'load_condition':'ballast','name_of_load':'test_load_ballast_ULS','limit_state':'ULS'}
tank_dict_ballast = {'acc': {'dyn_ballast': 3.0, 'dyn_loaded': 3.0, 'static': 9.81}, 'added_press': 25000.0,
'cells': 10632,'comp_no': 4, 'content': 'ballast', 'density': 1025.0, 'max_el': 20.0,
'min_el': 0.0}
comp2 = {'acc': {'static': 9.81, 'dyn_ballast': 3.0, 'dyn_loaded': 3.0}, 'max_el': 29.5, 'added_press': 25000.0,
'cells': 29591, 'density': 1025.0, 'content': 'crude_oil', 'comp_no': 2, 'min_el': 2.5}
comp3 = {'acc': {'static': 9.81, 'dyn_ballast': 3.0, 'dyn_loaded': 3.0}, 'max_el': 29.5, 'added_press': 25000.0,
'cells': 19638, 'density': 1025.0, 'content': 'crude_oil', 'comp_no': 3, 'min_el': 2.5}
comp4 = {'acc': {'static': 9.81, 'dyn_ballast': 3.0, 'dyn_loaded': 3.0}, 'max_el': 29.5, 'added_press': 25000.0,
'cells': 19072, 'density': 1025.0, 'content': 'ballast', 'comp_no': 4, 'min_el': 0.0}
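# tank_dict_ballast and comp2..comp4 describe tank/compartment data: compartment
# number, a cell count (apparently from the compartment search), content and
# density, vertical extent (min_el/max_el in m), an added overpressure in Pa and
# static/dynamic accelerations in m/s^2, presumably used for internal pressures.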
load_side = {'poly_third': 0.0, 'poly_second': 303.0, 'poly_first': -3750.0, 'poly_const': 153000.0,
'load_condition': 'ballast', 'structure_type': None, 'man_press': None, 'static_draft': None,
'name_of_load': 'ballast_side', 'limit_state': 'ULS'}
load_bottom = {'poly_third': 0.0, 'poly_second': 31.0, 'poly_first': -83.0, 'poly_const': 45800.0,
'load_condition': 'ballast', 'structure_type': None, 'man_press': None, 'static_draft': None,
'name_of_load': 'ballast_bottom', 'limit_state': 'ULS'}
load_static = {'poly_third': None, 'poly_second': None, 'poly_first': None, 'poly_const': None,
'load_condition': 'ballast', 'structure_type': None, 'man_press': None, 'static_draft': 15.0,
'name_of_load': 'ballast_static', 'limit_state': 'ULS'}
load_slamming = {'poly_third': 0, 'poly_second': 0, 'poly_first': 0, 'poly_const': 1000000.0,
'load_condition': 'slamming', 'structure_type': None, 'man_press': None, 'static_draft': None,
'name_of_load': 'slamming', 'limit_state': None}
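# The load dictionaries above define lateral pressure either via a static draft
# (load_static) or, judging by the poly_* keys, as a cubic polynomial in the
# vertical coordinate: p(z) = poly_third*z**3 + poly_second*z**2 + poly_first*z
# + poly_const. A minimal sketch (not the ANYstructure implementation) of
# evaluating such a polynomial definition:
def _example_eval_load_pressure(load, z):
    """Illustrative only: evaluate the cubic pressure polynomial at elevation z."""
    return (load['poly_third'] * z ** 3 + load['poly_second'] * z ** 2
            + load['poly_first'] * z + load['poly_const'])

# Example (illustrative): _example_eval_load_pressure(load_bottom, 2.0)
# -> 0*8 + 31*4 - 83*2 + 45800 = 45758.0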
ex_comp1 = {'comp_no': 2, 'cells': 32829, 'min_el': 2.5, 'max_el': 30.9, 'content': '', 'added_press': 25000.0,
'acc': {'static': 9.81, 'dyn_loaded': 3.0, 'dyn_ballast': 3.0}, 'density': 1025.0,
'all_types': ['BOTTOM', 'BBS', 'BBT', 'HOPPER', 'SIDE_SHELL', 'INNER_SIDE', 'FRAME', 'FRAME_WT',
'SSS', 'MD', 'GENERAL_INTERNAL_WT', 'GENERAL_INTERNAL_NONWT', 'INTERNAL_1_MPA',
'INTERNAL_LOW_STRESS_WT']}
ex_comp2 = {'comp_no': 3, 'cells': 62530, 'min_el': 2.5, 'max_el': 30.900000000000002, 'content': '',
'added_press': 25000.0, 'acc': {'static': 9.81, 'dyn_loaded': 3.0, 'dyn_ballast': 3.0},
'density': 1025.0, 'all_types': ['BOTTOM', 'BBS', 'BBT', 'HOPPER', 'SIDE_SHELL', 'INNER_SIDE', 'FRAME',
'FRAME_WT', 'SSS', 'MD', 'GENERAL_INTERNAL_WT', 'GENERAL_INTERNAL_NONWT',
'INTERNAL_1_MPA', 'INTERNAL_LOW_STRESS_WT']}
ex_comp3 = {'comp_no': 4, 'cells': 14559, 'min_el': 0.0, 'max_el': 30.900000000000002, 'content': '',
'added_press': 25000.0, 'acc': {'static': 9.81, 'dyn_loaded': 3.0, 'dyn_ballast': 3.0},
'density': 1025.0, 'all_types': ['BOTTOM', 'BBS', 'BBT', 'HOPPER', 'SIDE_SHELL', 'INNER_SIDE',
'FRAME', 'FRAME_WT', 'SSS', 'MD', 'GENERAL_INTERNAL_WT',
'GENERAL_INTERNAL_NONWT', 'INTERNAL_1_MPA', 'INTERNAL_LOW_STRESS_WT']}
ex_comp4 = {'comp_no': 5, 'cells': 2785, 'min_el': 0.0, 'max_el': 2.5, 'content': '', 'added_press': 25000.0,
'acc': {'static': 9.81, 'dyn_loaded': 3.0, 'dyn_ballast': 3.0}, 'density': 1025.0,
'all_types': ['BOTTOM', 'BBS', 'BBT', 'HOPPER', 'SIDE_SHELL', 'INNER_SIDE', 'FRAME', 'FRAME_WT',
'SSS', 'MD', 'GENERAL_INTERNAL_WT', 'GENERAL_INTERNAL_NONWT', 'INTERNAL_1_MPA',
'INTERNAL_LOW_STRESS_WT']}
run_dict = {'line3': {'Identification': 'line3', 'Length of panel': 4000.0, 'Stiffener spacing': 700.0,
'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T',
'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 200.0,
'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0,
'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0,
'Yield stress stiffener': 355.0, 'Axial stress': 101.7, 'Trans. stress 1': 100.0,
'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.41261,
'In-plane support': 'Int'},
'line4': {'Identification': 'line4', 'Length of panel': 3900.0, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0,
'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 250.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 100.5, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.406561, 'In-plane support': 'Int'}, 'line5': {'Identification': 'line5', 'Length of panel': 3800.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 250.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 102.7, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.406575, 'In-plane support': 'Int'}, 'line6': {'Identification': 'line6', 'Length of panel': 3700.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 250.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 102.7, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.412197, 'In-plane support': 'Int'}, 'line7': {'Identification': 'line7', 'Length of panel': 3600.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 250.0, 'Flange thick.': 12.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 101.5, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.422985, 'In-plane support': 'Int'}, 'line8': {'Identification': 'line8', 'Length of panel': 3500.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 250.0, 'Flange thick.': 12.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 101.5, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.438508, 'In-plane support': 'Int'}, 'line9': {'Identification': 'line9', 'Length of panel': 3800.0, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 200.0, 'Flange thick.': 18.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 100.7, 'Trans. stress 1': 100.0, 'Trans. 
stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.459639, 'In-plane support': 'Int'}, 'line10': {'Identification': 'line10', 'Length of panel': 3800.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 50.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.487211, 'In-plane support': 'Int'}, 'line11': {'Identification': 'line11', 'Length of panel': 4000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 500.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 50.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.521418, 'In-plane support': 'Int'}, 'line12': {'Identification': 'line12', 'Length of panel': 3905.1200000000003, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 500.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 50.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.557214, 'In-plane support': 'Int'}, 'line50': {'Identification': 'line50', 'Length of panel': 3000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 340.0, 'Web thick.': 12.0, 'Flange width': 200.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.300313, 'In-plane support': 'Int'}, 'line51': {'Identification': 'line51', 'Length of panel': 3199.999999999999, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 340.0, 'Web thick.': 12.0, 'Flange width': 200.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.295486, 'In-plane support': 'Int'}, 'line52': {'Identification': 'line52', 'Length of panel': 3400.0000000000005, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. 
Height': 340.0, 'Web thick.': 12.0, 'Flange width': 200.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.248226, 'In-plane support': 'Int'}, 'line53': {'Identification': 'line53', 'Length of panel': 3400.0000000000005, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 340.0, 'Web thick.': 12.0, 'Flange width': 200.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.214038, 'In-plane support': 'Int'}, 'line54': {'Identification': 'line54', 'Length of panel': 3600.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 340.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.196177, 'In-plane support': 'Int'}, 'line55': {'Identification': 'line55', 'Length of panel': 3800.000000000001, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 340.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.189068, 'In-plane support': 'Int'}, 'line56': {'Identification': 'line56', 'Length of panel': 4000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 340.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.105442, 'In-plane support': 'Int'}, 'line57': {'Identification': 'line57', 'Length of panel': 4000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 340.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 100.0, 'Trans. 
stress 2': 100.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.155554, 'In-plane support': 'Int'}, 'line31': {'Identification': 'line31', 'Length of panel': 4000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line32': {'Identification': 'line32', 'Length of panel': 3900.0000000000005, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line33': {'Identification': 'line33', 'Length of panel': 3799.999999999999, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line34': {'Identification': 'line34', 'Length of panel': 3699.999999999999, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line35': {'Identification': 'line35', 'Length of panel': 3600.0000000000014, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line36': {'Identification': 'line36', 'Length of panel': 3500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. 
Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line37': {'Identification': 'line37', 'Length of panel': 3800.000000000001, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line38': {'Identification': 'line38', 'Length of panel': 3800.000000000001, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line39': {'Identification': 'line39', 'Length of panel': 4000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line40': {'Identification': 'line40', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 14.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 3.0, 'Pressure (fixed)': 0.0325, 'In-plane support': 'Int'}, 'line13': {'Identification': 'line13', 'Length of panel': 4000.0, 'Stiffener spacing': 775.0, 'Plate thickness': 20.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 450.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. 
stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line14': {'Identification': 'line14', 'Length of panel': 4000.0, 'Stiffener spacing': 775.0, 'Plate thickness': 20.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 450.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line15': {'Identification': 'line15', 'Length of panel': 4000.0, 'Stiffener spacing': 775.0, 'Plate thickness': 20.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 450.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line16': {'Identification': 'line16', 'Length of panel': 3699.999999999999, 'Stiffener spacing': 775.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 375.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 18.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line17': {'Identification': 'line17', 'Length of panel': 3600.0000000000014, 'Stiffener spacing': 775.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 375.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 18.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line18': {'Identification': 'line18', 'Length of panel': 3500.0, 'Stiffener spacing': 775.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 375.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 18.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line19': {'Identification': 'line19', 'Length of panel': 3800.000000000001, 'Stiffener spacing': 775.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. 
Height': 375.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 18.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line20': {'Identification': 'line20', 'Length of panel': 3800.000000000001, 'Stiffener spacing': 775.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 375.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 18.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line41': {'Identification': 'line41', 'Length of panel': 5000.0, 'Stiffener spacing': 775.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 500.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 25.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.436313, 'In-plane support': 'Int'}, 'line43': {'Identification': 'line43', 'Length of panel': 3199.999999999999, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 325.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 80.0, 'Trans. stress 2': 80.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.393657, 'In-plane support': 'Int'}, 'line44': {'Identification': 'line44', 'Length of panel': 3400.0000000000005, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 325.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 80.0, 'Trans. stress 2': 80.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.348157, 'In-plane support': 'Int'}, 'line45': {'Identification': 'line45', 'Length of panel': 3400.0000000000005, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 325.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 80.0, 'Trans. 
stress 2': 80.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.299813, 'In-plane support': 'Int'}, 'line46': {'Identification': 'line46', 'Length of panel': 3600.0000000000014, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 325.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 80.0, 'Trans. stress 2': 80.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.251469, 'In-plane support': 'Int'}, 'line47': {'Identification': 'line47', 'Length of panel': 3800.000000000001, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 325.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 80.0, 'Trans. stress 2': 80.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.200281, 'In-plane support': 'Int'}, 'line48': {'Identification': 'line48', 'Length of panel': 4000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 325.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 80.0, 'Trans. stress 2': 80.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.14625, 'In-plane support': 'Int'}, 'line49': {'Identification': 'line49', 'Length of panel': 4000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 325.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 16.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 80.0, 'Trans. stress 2': 80.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.089375, 'In-plane support': 'Int'}, 'line58': {'Identification': 'line58', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line59': {'Identification': 'line59', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. 
Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line61': {'Identification': 'line61', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line62': {'Identification': 'line62', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line63': {'Identification': 'line63', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line64': {'Identification': 'line64', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line65': {'Identification': 'line65', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. 
stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line66': {'Identification': 'line66', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line21': {'Identification': 'line21', 'Length of panel': 4000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line42': {'Identification': 'line42', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line22': {'Identification': 'line22', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line67': {'Identification': 'line67', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line68': {'Identification': 'line68', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. 
stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line69': {'Identification': 'line69', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line70': {'Identification': 'line70', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line71': {'Identification': 'line71', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line72': {'Identification': 'line72', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line73': {'Identification': 'line73', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. 
stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line60': {'Identification': 'line60', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 0.0, 'Flange thick.': 0.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 60.0, 'Trans. stress 1': 70.0, 'Trans. stress 2': 70.0, 'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'Int'}, 'line1': {'Identification': 'line1', 'Length of panel': 2500.0, 'Stiffener spacing': 700.0, 'Plate thickness': 14.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 250.0, 'Web thick.': 18.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 20.0, 'Trans. stress 1': 40.0, 'Trans. stress 2': 40.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.47186, 'In-plane support': 'Int'}, 'line2': {'Identification': 'line2', 'Length of panel': 3000.0, 'Stiffener spacing': 700.0, 'Plate thickness': 16.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'F', 'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 18.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 20.0, 'Trans. stress 1': 40.0, 'Trans. stress 2': 40.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.387068, 'In-plane support': 'Int'}, 'line23': {'Identification': 'line23', 'Length of panel': 3000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 15.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 350.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.387068, 'In-plane support': 'Int'}, 'line24': {'Identification': 'line24', 'Length of panel': 3200.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 350.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.349613, 'In-plane support': 'Int'}, 'line25': {'Identification': 'line25', 'Length of panel': 3400.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 350.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. 
stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.309662, 'In-plane support': 'Int'}, 'line26': {'Identification': 'line26', 'Length of panel': 3400.0, 'Stiffener spacing': 750.0, 'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 320.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.267214, 'In-plane support': 'Int'}, 'line27': {'Identification': 'line27', 'Length of panel': 3600.0000000000014, 'Stiffener spacing': 750.0, 'Plate thickness': 15.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 320.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.224765, 'In-plane support': 'Int'}, 'line28': {'Identification': 'line28', 'Length of panel': 3800.000000000001, 'Stiffener spacing': 750.0, 'Plate thickness': 15.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 320.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.17982, 'In-plane support': 'Int'}, 'line29': {'Identification': 'line29', 'Length of panel': 4000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 15.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 300.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.132378, 'In-plane support': 'Int'}, 'line30': {'Identification': 'line30', 'Length of panel': 4000.0, 'Stiffener spacing': 750.0, 'Plate thickness': 15.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C', 'Stiff. Height': 300.0, 'Web thick.': 12.0, 'Flange width': 150.0, 'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0, 'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0, 'Yield stress stiffener': 355.0, 'Axial stress': 40.0, 'Trans. stress 1': 90.0, 'Trans. stress 2': 90.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.082439, 'In-plane support': 'Int'}}
run_dict_one = {'line3': {'Identification': 'line3', 'Length of panel': 4000.0, 'Stiffener spacing': 700.0,
'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T',
'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 200.0,
'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0,
'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0,
'Yield stress stiffener': 355.0, 'Axial stress': 101.7, 'Trans. stress 1': 100.0,
'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.41261,
'In-plane support': 'Int'}}
shell_dict = {'plate_thk': [20 / 1000, 'm'],
'radius': [5000 / 1000, 'm'],
'distance between rings, l': [700 / 1000, 'm'],
'length of shell, L': [5000 / 1000, 'm'],
'tot cyl length, Lc': [5000 / 1000, 'm'],
'eff. buckling lenght factor': [1, ''],
'mat_yield': [355 * 1e6, 'Pa'],
}
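# Note: the shell/cylinder input dictionaries above and below use [value, unit] pairs,
# matching the other GUI input dictionaries in this module.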
shell_main_dict = {'sasd': [-10e6, 'Pa'],
'smsd': [-10e6, 'Pa'],
'tTsd': [40* 1e6, 'Pa'],
'tQsd': [40* 1e6, 'Pa'],
'psd': [-0.1e6, 'Pa'],
'shsd': [0, 'Pa'],
'geometry': [3, '-'],
'material factor': [1.15, ''],
'delta0': [0.005, ''],
'fab method ring stf': [1, ''],
'fab method ring girder': [1, ''],
'E-module': [2.1e11, 'Pa'],
'poisson': [0.3, '-'],
'mat_yield': [355 * 1e6, 'Pa'],
'length between girders' : [None, 'm'],
'panel spacing, s' : [2, 'm'],
'ring stf excluded' : [False, ''],
'ring frame excluded' : [True, ''],
'end cap pressure': ['not included in axial stresses', ''],
'ULS or ALS': ['ULS', '']}
'''
self._length_between_girders = main_dict['length between girders'][0]
self._panel_spacing = main_dict['panel spacing, s'][0]
self.__ring_stiffener_excluded = main_dict['ring stf excluded'][0]
self.__ring_frame_excluded = main_dict['ring frame excluded'][0]'''
shell_main_dict2 = {'sasd': [79.58 * 1e6, 'Pa'],
'smsd': [31.89* 1e6, 'Pa'],
'tTsd': [12.73* 1e6, 'Pa'],
'tQsd': [4.77* 1e6, 'Pa'],
'psd': [-0.2* 1e6, 'Pa'],
'shsd': [0, 'Pa'],
'geometry': [5, '-'],
'material factor': [1.15, ''],
'delta0': [0.005, ''],
'fab method ring stf': [1, ''],
'fab method ring girder': [1, ''],
'E-module': [2.1e11, 'Pa'],
'poisson': [0.3, '-'],
'mat_yield': [355 * 1e6, 'Pa'],
'length between girders': [None, 'm'],
'panel spacing, s': [0.7, 'm'],
'ring stf excluded': [False, ''],
'ring frame excluded': [True, ''],
'end cap pressure': ['not included in axial stresses', ''],
'ULS or ALS': ['ULS', '']}
prescriptive_main_dict = dict()
prescriptive_main_dict['minimum pressure in adjacent spans'] = [None, '']
prescriptive_main_dict['material yield'] = [355e6, 'Pa']
prescriptive_main_dict['load factor on stresses'] = [1, '']
prescriptive_main_dict['load factor on pressure'] = [1, '']
prescriptive_main_dict['buckling method'] = ['ultimate', '']
prescriptive_main_dict['stiffener end support'] = ['Continuous', ''] # 'Continuous'
prescriptive_main_dict['girder end support'] = ['Continuous', ''] # 'Continuous'
prescriptive_main_dict['tension field'] = ['not allowed', ''] # 'not allowed'
prescriptive_main_dict['plate effective agains sigy'] = [True, ''] # True
prescriptive_main_dict['buckling length factor stf'] = [None, '']
prescriptive_main_dict['buckling length factor girder'] = [None, '']
prescriptive_main_dict['km3'] = [12, ''] # 12
prescriptive_main_dict['km2'] = [24, ''] # 24
prescriptive_main_dict['girder distance between lateral support'] = [None, '']
prescriptive_main_dict['stiffener distance between lateral support'] = [None, '']
prescriptive_main_dict['kgirder'] = [None, '']
prescriptive_main_dict['panel length, Lp'] = [None, '']
prescriptive_main_dict['pressure side'] = ['both sides', '']  # either 'stiffener side', 'plate side' or 'both sides'
prescriptive_main_dict['fabrication method stiffener'] = ['welded', '']
prescriptive_main_dict['fabrication method girder'] = ['welded', '']
prescriptive_main_dict['calculation domain'] = ['Flat plate, stiffened', '']
def get_slamming_pressure():
return 1000000
def get_fatigue_pressures():
return {'p_ext':{'loaded':50000,'ballast':60000,'part':0}, 'p_int':{'loaded':0, 'ballast':20000,'part':0}}
def get_fatigue_pressures_problematic():
return {'p_ext': {'loaded': 192632, 'ballast': 198705.5, 'part': 0},
'p_int': {'loaded': 0, 'ballast': 15118, 'part': 0}}
def get_loa_fls_load():
return load.Loads(loa_fls)
def get_loa_uls_load():
return load.Loads(loa_uls)
def get_bal_fls_load():
return load.Loads(bal_fls)
def get_bal_uls_load():
return load.Loads(bal_uls)
def get_object_dictionary():
return obj_dict
def get_structure_object(line=None):
if line in ('line12','line13','line11','line4'):
return calc_structure.CalcScantlings(obj_dict_fr)
else:
return calc_structure.CalcScantlings(obj_dict)
def get_structure_calc_object(line=None, heavy = False):
if line in ('line12','line13','line11','line4'):
return calc_structure.CalcScantlings(obj_dict_fr)
else:
return calc_structure.CalcScantlings(obj_dict if not heavy else obj_dict_heavy)
def get_fatigue_object():
return calc_structure.CalcFatigue(obj_dict, fat_obj_dict)
def get_fatigue_object_problematic():
return calc_structure.CalcFatigue(obj_dict_sec_error, fat_obj_dict_problematic)
def get_tank_object():
return load.Tanks(tank_dict=tank_dict_ballast)
def get_line_to_struc(geo = False):
to_return = {}
for line in line_dict.keys():
Plate = get_structure_object(line)
Stiffener = get_structure_object(line)
Girder = None # CalcScantlings(ex.obj_dict_heavy)
initial_calc_obj = calc_structure.AllStructure(Plate=Plate, Stiffener=Stiffener, Girder=Girder,
main_dict=prescriptive_main_dict)
to_return[line]=[initial_calc_obj, None, None, [None], {}]
return to_return
def get_default_stresses():
return {'BOTTOM':(100,100,50,50,5), 'BBS':(70,70,30,30,3), 'BBT':(80,80,30,3), 'HOPPER':(70,70,50,50,3),
'SIDE_SHELL':(100,100,40,40,3),'INNER_SIDE':(80,80,40,40,5), 'FRAME':(70,70,60,0,10),
'FRAME_WT':(70,70,60,0,10),'SSS':(100,100,50,50,20), 'MD':(70,70,4,40,3),
'GENERAL_INTERNAL_WT':(90,90,40,40,5),'GENERAL_INTERNAL_NONWT':(70,70,30,30,3),
'INTERNAL_1_MPA':(1,1,1,1,1), 'INTERNAL_LOW_STRESS_WT':(40,40,20,20,5)}
def get_opt_frames():
return opt_frames,['point1', 'point4', 'point8', 'point5']
def get_point_dict():
return point_dict
def get_line_dict():
return line_dict
def get_grid(origo,base_canvas_dim):
return grid.Grid(origo[1] + 1, base_canvas_dim[0] - origo[0] + 1)
def get_grid_no_inp(empty_grid = False):
origo = (50,670)
base_canvas_dim = [1000,720]
grid_return = grid.Grid(origo[1] + 1, base_canvas_dim[0] - origo[0] + 1)
if empty_grid:
return grid_return
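    # Mark every grid cell along the drawn lines as a barrier; the barriers are later used
    # when the grid is searched to identify compartments (tanks).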
for line,coords in get_to_draw().items():
for point in grid_return.get_points_along_line(coords[0],coords[1]):
grid_return.set_barrier(point[0],point[1])
return grid_return
def get_grid_empty():
origo = (50,670)
base_canvas_dim = [1000,720]
grid_return = grid.Grid(origo[1] + 1, base_canvas_dim[0] - origo[0] + 1)
return grid_return
def get_to_draw():
to_return = {}
for line in line_dict.keys():
p1 = line_dict[line][0]
p2 = line_dict[line][1]
p1_coord = point_dict['point'+str(p1)]
p2_coord = point_dict['point'+str(p2)]
point_coord = (p1_coord,p2_coord)
to_return[line]= get_grid_coord_from_points_coords(point_coord[0]),\
get_grid_coord_from_points_coords(point_coord[1])
return to_return
def get_geo_opt_presure():
return (200,200,200,200,200,200)
def get_random_pressure():
return 150 + 100*random.random()
def get_random_color():
return random.choice(['red','green','green','green'])
def get_geo_opt_object():
dicts = ({'mat_yield': [355000000.0, 'Pa'], 'span': [4.0, 'm'], 'spacing': [0.7, 'm'], 'plate_thk': [0.015, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''], 'stf_kps': [1, ''],
'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''], 'sigma_y1': [80, 'MPa'],
'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'], 'stf_type': ['T', ''],
'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''] },
{'mat_yield': [355000000.0, 'Pa'], 'span': [4.0, 'm'], 'spacing': [0.7, 'm'], 'plate_thk': [0.015, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''],
'stf_kps': [1, ''], 'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''],
'sigma_y1': [80, 'MPa'], 'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'],
'stf_type': ['T', ''], 'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''] },
{'mat_yield': [355000000.0, 'Pa'], 'span': [4.0, 'm'], 'spacing': [0.7, 'm'], 'plate_thk': [0.015, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['BOTTOM', ''], 'plate_kpp': [1, ''], 'stf_kps': [1, ''],
'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''], 'sigma_y1': [80, 'MPa'],
'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'], 'stf_type': ['T', ''],
'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''] },
{'mat_yield': [355000000.0, 'Pa'], 'span': [4.0, 'm'], 'spacing': [0.7, 'm'], 'plate_thk': [0.015, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['GENERAL_INTERNAL_WT', ''], 'plate_kpp': [1, ''], 'stf_kps': [1, ''],
'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''], 'sigma_y1': [80, 'MPa'],
'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'], 'stf_type': ['T', ''],
'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''] },
{'mat_yield': [355000000.0, 'Pa'], 'span': [4.0, 'm'], 'spacing': [0.7, 'm'], 'plate_thk': [0.015, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['GENERAL_INTERNAL_WT', ''], 'plate_kpp': [1, ''], 'stf_kps': [1, ''],
'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''], 'sigma_y1': [80, 'MPa'],
'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'], 'stf_type': ['T', ''],
'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''] },
{'mat_yield': [355000000.0, 'Pa'], 'span': [4.0, 'm'], 'spacing': [0.7, 'm'], 'plate_thk': [0.015, 'm'],
'stf_web_height': [0.4, 'm'], 'stf_web_thk': [0.012, 'm'], 'stf_flange_width': [0.15, 'm'],
'stf_flange_thk': [0.02, 'm'], 'structure_type': ['GENERAL_INTERNAL_WT', ''], 'plate_kpp': [1, ''], 'stf_kps': [1, ''],
'stf_km1': [12, ''], 'stf_km2': [24, ''], 'stf_km3': [12, ''], 'sigma_y1': [80, 'MPa'],
'sigma_y2': [80, 'MPa'], 'sigma_x2': [80, 'MPa'], 'sigma_x1': [80, 'MPa'], 'tau_xy': [5, 'MPa'], 'stf_type': ['T', ''],
'structure_types': [structure_types, ''], 'zstar_optimization': [True, ''] })
return [calc_structure.CalcScantlings(dic) for dic in dicts]
def get_geo_opt_fatigue():
return [get_fatigue_object() for dummy in range(len(get_geo_opt_presure()))]
def get_geo_opt_fat_press():
return [get_fatigue_pressures() for dummy in range(len(get_geo_opt_presure()))]
def get_geo_opt_slamming_none():
return [0 for dummy in range(len(get_geo_opt_presure()))]
def get_geo_opt_slamming():
return [get_slamming_pressure() for dummy in range(len(get_geo_opt_presure()))]
def get_grid_coord_from_points_coords(point_coord):
'''
Converts coordinates to be used in the grid. Returns (row,col). This value will not change with slider.
    :param point_coord: (x, y) coordinates of the point
    :return: (row, col) tuple for use in the grid
'''
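    # Worked example with the values below: point_coord = (12, 10)
    # gives row = 670 - 10*10 = 570 and col = 12*10 = 120.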
canvas_origo = (50,670)
row = canvas_origo[1] - point_coord[1]*10
col = point_coord[0]*10
return (row,col)
def get_section_list():
''' Returning a section list. '''
import pl_stf_window as plstf
return [plstf.Section(obj_dict), plstf.Section(obj_dict2), plstf.Section(obj_dict_L)]
if __name__ == '__main__':
    print(get_random_color())

# End of any_files/example_data.py
# -*- coding: utf-8 -*-
import os
import tkinter as tk
from tkinter import ttk
from tkinter import filedialog
from tkinter import messagebox
import decimal, pickle
from _tkinter import TclError
import multiprocessing
import ctypes
from matplotlib import pyplot as plt
import matplotlib
from reportlab.lib.pagesizes import letter, landscape
from reportlab.platypus import SimpleDocTemplate
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.metrics._pairwise_distances_reduction import _datasets_pair,_middle_term_computer
try:
from any_files.calc_structure import *
from any_files.calc_loads import *
import any_files.load_window as load_window
import any_files.make_grid_numpy as grid
import any_files.grid_window as grid_window
from any_files.helper import *
import any_files.optimize as op
import any_files.optimize_window as opw
import any_files.optimize_cylinder as opc
import any_files.optimize_multiple_window as opwmult
import any_files.optimize_geometry as optgeo
import any_files.pl_stf_window as struc
import any_files.stresses_window as stress
import any_files.fatigue_window as fatigue
import any_files.load_factor_window as load_factors
from any_files.report_generator import LetterMaker
import any_files.sesam_interface as sesam
except ModuleNotFoundError:
# This is due to pyinstaller issues.
from ANYstructure.any_files.calc_structure import *
from ANYstructure.any_files.calc_loads import *
import ANYstructure.any_files.load_window as load_window
import ANYstructure.any_files.make_grid_numpy as grid
import ANYstructure.any_files.grid_window as grid_window
from ANYstructure.any_files.helper import *
import ANYstructure.any_files.optimize as op
import ANYstructure.any_files.optimize_window as opw
import ANYstructure.any_files.optimize_cylinder as opc
import ANYstructure.any_files.optimize_multiple_window as opwmult
import ANYstructure.any_files.optimize_geometry as optgeo
import ANYstructure.any_files.pl_stf_window as struc
import ANYstructure.any_files.stresses_window as stress
import ANYstructure.any_files.fatigue_window as fatigue
import ANYstructure.any_files.load_factor_window as load_factors
from ANYstructure.any_files.report_generator import LetterMaker
import ANYstructure.any_files.sesam_interface as sesam
class Application():
'''
The Application class sets up the GUI using Tkinter.
It is the main part of the code and calls up all other classes etc.
'''
def __init__(self, parent):
'''
        Initiating the tkinter frame.
        The general GUI setup is done in the method gui_init.
:param parent:
'''
super(Application, self).__init__()
parent.wm_title('| ANYstructure |')
self._parent = parent
parent.protocol("WM_DELETE_WINDOW", self.close_main_window)
parent.bind("<Configure>", self.resize)
self._root_dir = os.path.dirname(os.path.abspath(__file__))
#self._root_dir = os.path.dirname(os.path.abspath(__file__)).replace('any_files','')
# Main frame for the application
self._main_fr = ttk.Frame(parent)
self._main_fr.place(in_=parent, relwidth=1, relheight = 0.99)
        # Defining general colors
        self._general_color = 'alice blue'  # Color for backgrounds (alternative: 'azure2').
        self._entry_color = 'white' # Entry fields color.
        self._entry_text_color = 'black' # Entry field text color
self._button_bg_color = 'LightBlue1'
self._button_fg_color = 'black'
self._color_text = 'white'
''' Setting the style of ttk'''
#
self._style = ttk.Style(parent)
# vista theme not available in linux
try:
self._style.theme_use('ITFT1')
#self._style.theme_use('vista')
except:
# available themes in linux:
# ('clam', 'alt', 'default', 'classic')
self._style.theme_use('clam')
self._style.layout("TNotebook", [])
self._style.configure("TNotebook", tabmargins=0)
# tabbed frames
self._tabControl = ttk.Notebook(parent)
self._tab_geo = ttk.Frame(self._tabControl, relief = 'flat')
self._tab_prop = ttk.Frame(self._tabControl, relief = 'flat')
self._tab_comp = ttk.Frame(self._tabControl, relief='flat')
self._tab_prop_tools = ttk.Frame(self._tabControl, relief='flat')
self._tab_information = ttk.Frame(self._tabControl, relief='flat')
self._tab_help = ttk.Frame(self._tabControl, relief='flat')
self._tabControl.add(self._tab_geo, text='Geometry')
self._tabControl.add(self._tab_prop, text='Line properties')
self._tabControl.add(self._tab_prop_tools, text='Properties tools')
self._tabControl.add(self._tab_comp, text='Compartments and loads')
self._tabControl.add(self._tab_information, text='Information')
self._tabControl.add(self._tab_help, text='Help')
self._tabControl.place(relwidth=0.2585, relheight = 1)
#self._tabControl.select(self._tab2)
# Top open/save/new
menu = tk.Menu(parent)
parent.config(menu=menu)
# menu, canvas, etc.
sub_menu = tk.Menu(menu)
menu.add_cascade(label='File', menu=sub_menu)
sub_menu.add_command(label='New project', command=self.reset)
sub_menu.add_command(label='Save project as...', command=self.savefile)
self.__last_save_file = None # Keeping the last filename and path
sub_menu.add_command(label='Save project, Alt-S', command=self.save_no_dialogue)
sub_menu.add_command(label='Open project', command=self.openfile)
sub_menu.add_command(label='Restore previous', command=self.restore_previous)
self._shortcut_text = 'CTRL-Z Undo geometry action\n' \
'CTRL-P Copy selected point\n' \
                              'CTRL-M Move selected point\n' \
                              'CTRL-N Move selected line\n' \
'CTRL-Q New line (right click two points)\n' \
'CTRL-S Assign structure prop. to line\n' \
'CTRL-A Select all lines (change param)\n' \
'CTRL-T Select all structure types (selected)\n' \
'CTRL-DELETE Delete structure prop. from line\n' \
'DELETE Delete active line and/or point \n' \
'CTRL-E Copy line properties from active line\n' \
                              'CTRL-D Paste line properties to active line\n' \
'Mouse click left/right - select line/point\n' \
'Arrows left/right - previous/next line\n' \
'Arrows up/down - previous/next point'
''' END style setting'''
undo_redo = tk.Menu(menu)
menu.add_cascade(label='Geometry', menu=undo_redo)
undo_redo.add_command(label='Undo geometry action (CTRL-Z)', command=self.undo)
#undo_redo.add_command(label='Redo geometry action (CTRL-Y)', command=self.redo)
undo_redo.add_command(label='Copy selected point (CTRL-P)', command=self.copy_point)
undo_redo.add_command(label='Move selected point (CTRL-M)', command=self.move_point)
undo_redo.add_command(label='Move selected line (CTRL-N)', command=self.move_line)
undo_redo.add_command(label='New line (right click two points) (CTRL-Q)', command=self.new_line)
undo_redo.add_command(label='Assign structure properties to clicked line (CTRL-S)',
command=self.new_structure)
undo_redo.add_command(label='Delete structure properties from clicked line (CTRL-DELETE)',
command=self.delete_properties_pressed)
undo_redo.add_command(label='Delete active line and/or point (DELETE)',
command=self.delete_key_pressed)
undo_redo.add_command(label='Copy line properties from active line (CTRL-E)',
command=self.copy_property)
        undo_redo.add_command(label='Paste line properties to active line (CTRL-D)',
command=self.paste_property)
sub_report = tk.Menu(menu)
menu.add_cascade(label = 'Reporting', menu = sub_report)
sub_report.add_command(label = 'Generate PDF report', command = self.report_generate)
sub_report.add_command(label='Generate PDF result table', command=self.table_generate)
sub_report.add_command(label='Stiffened flat plate - Weight development, plates and beams', command=self.on_plot_cog_dev)
sub_sesam = tk.Menu(menu)
menu.add_cascade(label = 'Interfaces', menu = sub_sesam)
sub_sesam.add_command(label = 'Export geometry to SESAM GeniE JS', command = self.export_to_js)
sub_sesam.add_command(label='Run all PULS lines', command=self.puls_run_all_lines)
sub_sesam.add_command(label='Delete all PULS results', command=self.puls_delete_all)
sub_help = tk.Menu(menu)
menu.add_cascade(label='Help', menu = sub_help)
sub_help.add_command(label = 'Open website (documentation etc.)', command = self.open_documentation)
sub_help.add_command(label='Open documentation pdf', command=self.open_documentation_pdf)
sub_help.add_command(label='Donate!', command=self.open_donate)
sub_help.add_command(label = 'Open example file', command = self.open_example)
sub_help.add_command(label='About ANYstructure', command=self.open_about)
sub_colors = tk.Menu(menu)
menu.add_cascade(label='GUI', menu = sub_colors)
sub_colors.add_command(label='Colors - Default', command=lambda id="default": self.set_colors(id))
sub_colors.add_command(label = 'Colors - Light', command = lambda id = "light": self.set_colors(id))
sub_colors.add_command(label='Colors - Grey', command = lambda id = "grey": self.set_colors(id))
sub_colors.add_command(label='Colors - Dark', command = lambda id = "dark": self.set_colors(id))
sub_colors.add_command(label='Colors - Unicorn', command=lambda id="pink": self.set_colors(id))
sub_colors.add_command(label='Colors - Slava Ukraini', command=lambda id="SlavaUkraini": self.set_colors(id))
sub_colors.add_command(label='Functional - All items', command=lambda id="all items": self.set_colors(id))
sub_colors.add_command(label='Functional - Modelling', command=lambda id="modelling": self.set_colors(id))
sub_colors.add_command(label='Functional - Cylinder', command=lambda id="cylinder": self.set_colors(id))
#base_mult = 1.2
        #base_canvas_dim = [int(1000 * base_mult),int(720*base_mult)] #do not modify this, sets the "original" canvas dimensions.
        base_canvas_dim = [1000,720] #do not modify this, sets the "original" canvas dimensions.
self._canvas_dim = [int(base_canvas_dim[0] *1),
int(base_canvas_dim[1] *1)]
self._canvas_base_origo = [50, base_canvas_dim[1] - 50] # 50 bottom left location of the canvas, (0,0)
self._canvas_draw_origo = [self._canvas_base_origo[0], self._canvas_base_origo[1]+10]
self._previous_drag_mouse = list(self._canvas_draw_origo)
# Setting the fonts for all items in the application.
self.text_scale = 1
self._text_size = {'Text 14 bold': 'Verdana '+str(int(14*self.text_scale))+' bold',
'Text 16 bold': 'Verdana ' + str(int(16 * self.text_scale)) + ' bold',
'Text 18 bold': 'Verdana ' + str(int(18 * self.text_scale)) + ' bold',
'Text 12 bold': 'Verdana ' + str(int(12 * self.text_scale)) + ' bold',
'Text 10 bold': 'Verdana '+str(int(10*self.text_scale))+' bold',
'Text 9 bold': 'Verdana ' + str(int(9 * self.text_scale)) + ' bold',
'Text 8 bold': 'Verdana ' + str(int(8 * self.text_scale)) + ' bold',
'Text 8': 'Verdana ' + str(int(8 * self.text_scale)),
                           'Text 9': 'Verdana ' + str(int(9 * self.text_scale)),
'Text 7': 'Verdana ' + str(int(7 * self.text_scale)),
'Text 10': 'Verdana ' + str(int(10 * self.text_scale)),
'Text 7 bold': 'Verdana ' + str(int(7 * self.text_scale)) + ' bold',
'Text 6 bold': 'Verdana ' + str(int(6 * self.text_scale)) + ' bold'}
self._canvas_scale = 20 # Used for slider and can change
self._base_scale_factor = 10 # Used for grid and will not change, 10 is default
self._prop_canvas_scale = 100 # Scrolling for property canvas
# self._prop_canvas_x_base =
# self._prop_canvas_y_base =
        # Creating the various canvases next.
self._main_canvas = tk.Canvas(self._main_fr,
background=self._style.lookup('TFrame', 'background'), bd=0,
highlightthickness=0, relief='ridge')
self._prop_canvas = tk.Canvas(self._main_fr,
background=self._style.lookup('TFrame', 'background'), bd=0,
highlightthickness=0, relief='ridge')
self._result_canvas = tk.Canvas(self._main_fr,
background=self._style.lookup('TFrame', 'background'), bd=0,
highlightthickness=0, relief='ridge')
# # These frames are just visual separations in the GUI.
# frame_horizontal, frame_vertical = 0.73, 0.258
# self._frame_viz_hor = tk.Frame(self._main_fr, height=3, bg="black", colormap="new")
# self._frame_viz_hor.place(relx=0, rely=frame_horizontal, relwidth=1)
# self._frame_viz_ver = tk.Frame(self._main_fr, width=3, bg="black", colormap="new")
# self._frame_viz_ver.place(relx=frame_vertical,rely=0 * 1, relheight=1)
x_canvas_place = 0.26
self._main_canvas.place(relx=x_canvas_place, rely=0,relwidth=0.523, relheight = 0.73)
self._prop_canvas.place(relx=x_canvas_place, rely=0.73, relwidth=0.38, relheight = 0.27)
self._result_canvas.place(relx=x_canvas_place+0.38, rely=0.73, relwidth=0.36, relheight = 0.27)
# Point frame
self._pt_frame = tk.Frame(self._main_canvas, width=100, height=100, bg="black", relief='raised')
        # Cylinder GUI look placement of optimization button
self._gui_functional_look_cylinder_opt = [0.82, 0.008, 0.04, 0.175]
#
# -------------------------------------------------------------------------------------------------------------
#
        # The dictionaries below are the main dictionaries used to define this application.
self._point_dict = {} # Main point dictionary (point:coords) - see method new_point
self._line_dict = {} # Main line dictionary (line:point,point) - see method new_line
        self._line_to_struc = {} # Main line associations (line:various objects) - see method new_structure
# The dictionary is widely used and includes all classes in the program
# Example:
# 'line1':[Structure,CalcScantlings,Fatigue,Load,Combinations]
self._tank_dict = {} # Main tank dictionary (created when BFS search is executed for the grid) (comp# : TankObj)
self._load_dict = {} # Main load dictionary (created in separate load window (load# : [LoadObj, lines])
self._new_load_comb_dict = {} # Load combination dict.(comb,line,load) : [DoubleVar(), DoubleVar(), IntVar()]
# Example ('dnva', 'line25', 'comp3'), ('dnvb', 'line14', 'comp4'),
# ('manual', 'line74', 'manual'), ('tanktest', 'line76', 'comp3')
self._sections = list() # A list containing section property objects.
#
# -------------------------------------------------------------------------------------------------------------
#
self._pending_grid_draw = {} # Saving lines that shall be transferred to the calculation grid
# Load combinations definition used in method gui_load_combinations
        # These are created and destroyed and are not permanent in the application.
self._lc_comb_created,self._comp_comb_created,self._manual_created, self._info_created = [],[],[], []
self._state_logger = dict() # Used to see if recalculation is needed.
self._weight_logger = {'new structure': {'COG': list(), 'weight': list(), 'time': list()}} # Recording of weight development
        # The next dictionaries feed various information to the application
self._load_factors_dict = {'dnva':[1.3,1.2,0.7], 'dnvb':[1,1,1.2], 'tanktest':[1,1,0]} # DNV loads factors
        self._accelerations_dict = {'static':9.81, 'dyn_loaded':0, 'dyn_ballast':0} # Vertical accelerations
self._load_conditions = ['loaded','ballast','tanktest', 'part','slamming'] # Should not be modified. Load conditions.
self._tank_options = {'ballast': 1025, 'crude_oil': 900, 'diesel': 850 , 'slop': 1050, 'fresh water': 1000} # Should not be modified.
self._default_stresses = {'BOTTOM':(100,100,50,50,5), 'BBS':(70,70,30,30,3), 'BBT':(80,80,30,3), 'HOPPER':(70,70,50,50,3),
'SIDE_SHELL':(100,100,40,40,3),'INNER_SIDE':(80,80,40,40,5), 'FRAME':(70,70,60,0,10),
'FRAME_WT':(70,70,60,0,10),'SSS':(100,100,50,50,20), 'MD':(70,70,4,40,3),
'GENERAL_INTERNAL_WT':(90,90,40,40,5),'GENERAL_INTERNAL_NONWT':(70,70,30,30,3),
'INTERNAL_1_MPA':(1,1,1,1,1), 'INTERNAL_LOW_STRESS_WT':(40,40,20,20,5)}
# The default stresses are used for buckling calculations.
self._structure_types = {'vertical':['BBS', 'SIDE_SHELL', 'SSS'],
'horizontal': ['BOTTOM', 'BBT', 'HOPPER', 'MD'],
'non-wt': ['FRAME', 'GENERAL_INTERNAL_NONWT'],
'internals': ['INNER_SIDE', 'FRAME_WT', 'GENERAL_INTERNAL_WT',
'INTERNAL_ZERO_STRESS_WT', 'INTERNAL_LOW_STRESS_WT']}
self._options_type = [op_typ for op_typ in self._default_stresses.keys()]
self._point_options= ['fixed','free']
self._load_window_couter = 1 # this is used to create the naming of the tanks in the load window
self._logger = {'added': list(), 'deleted': list()} # used to log operations for geometry operations, to be used for undo/redo
self.__returned_load_data = None # Temporary data for returned loads from the load window.
self.__previous_load_data = None # Used to compare loads before and after.
self.__copied_line_prop = None # Used to copy line properties to another.
self._PULS_results = None # If a puls run is avaliable, it is stored here.
        self._center_of_buoyancy = dict() # Center of buoyancy for all and for various static drafts
# Example {8: (5,20), 22: (12,20), 'all': (16,20)}
self._ML_buckling = dict() # Buckling machine learning algorithm
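        # The keys appear to encode the model variant: SP/UP = stiffened/unstiffened panel,
        # buc/ult = buckling/ultimate capacity, int/GLGT = the 'Int' vs. 'GL'/'GT'
        # in-plane support options used elsewhere in the GUI.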
for name, file_base in zip(['cl SP buc int predictor', 'cl SP buc int scaler',
'cl SP ult int predictor', 'cl SP ult int scaler',
'cl SP buc GLGT predictor', 'cl SP buc GLGT scaler',
'cl SP ult GLGT predictor', 'cl SP ult GLGT scaler',
'cl UP buc int predictor', 'cl UP buc int scaler',
'cl UP ult int predictor', 'cl UP ult int scaler',
'cl UP buc GLGT predictor', 'cl UP buc GLGT scaler',
'cl UP ult GLGT predictor', 'cl UP ult GLGT scaler',
'CSR predictor UP', 'CSR scaler UP',
'CSR predictor SP', 'CSR scaler SP'
],
["ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_CSR-Tank_req_cl_predictor",
"ml_files\\CL_CSR-Tank_req_cl_UP_scaler",
"ml_files\\CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_predictor",
"ml_files\\CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_SP_scaler"]):
self._ML_buckling[name] = None
            if os.path.isfile(file_base + '.pickle'):
                # Pickled predictor/scaler found relative to the current working directory.
                with open(file_base + '.pickle', 'rb') as file:
                    self._ML_buckling[name] = pickle.load(file)
            else:
                # Fall back to the directory of this module.
                ml_file = os.path.join(self._root_dir, file_base + '.pickle')
                with open(ml_file, 'rb') as file:
                    self._ML_buckling[name] = pickle.load(file)
self._ML_classes ={0: 'N/A',
1: 'A negative utilisation factor is found.',
2: 'At least one of the in-plane loads must be non-zero.',
3: 'Division by zero',
4: 'Overflow',
5: 'The aspect ratio exceeds the PULS code limit',
6: 'The global slenderness exceeds 4. Please reduce stiffener span or increase stiffener height.',
7: 'The applied pressure is too high for this plate field.', 8: 'web-flange-ratio',
9: 'UF below or equal 0.87', 10: 'UF between 0.87 and 1.0', 11: 'UF above 1.0'}
# Used to select parameter
self._stuctural_definition = ['mat_yield','mat_factor', 'span', 'spacing', 'plate_thk', 'stf_web_height',
'stf_web_thk',
'stf_flange_width', 'stf_flange_thk', 'structure_type', 'stf_type',
'sigma_y1', 'sigma_y2', 'sigma_x1','sigma_x2', 'tau_xy', 'plate_kpp', 'stf_kps','stf_km1',
'stf_km2', 'stf_km3', 'press_side', 'zstar_optimization',
'puls buckling method', 'puls boundary', 'puls stiffener end', 'puls sp or up',
'puls up boundary']
self._p1_p2_select = False
self._line_is_active = False # True when a line is clicked
        self._active_line = '' # Name of the clicked line
self._point_is_active = False # True when a point is clicked
self._active_point = '' # Name of the clicked point
self.controls() # Function to activate mouse clicks
self._line_point_to_point_string = [] # This one ensures that a line is not created on top of a line
self._multiselect_lines = [] # A list used to select many lines. Used to set properties.
        # Initializing the calculation grid used for tank definition
self._grid_dimensions = [self._canvas_base_origo[1] + 1, base_canvas_dim[0] - self._canvas_base_origo[0] + 1]
#self._grid_dimensions = [self._canvas_base_origo[1], base_canvas_dim[0] - self._canvas_base_origo[0] + 1]
self._main_grid = grid.Grid(self._grid_dimensions[0], self._grid_dimensions[1])
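        # With the default canvas above (1000 x 720 px, origo at (50, 670)) the grid
        # dimensions evaluate to [671, 951].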
self._grid_calc = None
self.text_widget = None
        self._clicked_section_create= None # Identification of the button clicked. Sections.
self._gui_functional_look = 'all items' # used to change size and location of frames, canvas etc.
# These sets the location where entries are placed.
ent_x = 0.4
delta_y = 0.025
delta_x = 0.1
point_x_start, point_start = 0.005208333, 0.13
# ----------------------INITIATION OF THE SMALLER PARTS OF THE GUI STARTS HERE--------------------------
# Help tab
        ttk.Label(self._tab_help, text='Buckling parameters, flat plates', font="Text 10 bold", ) \
.place(relx=0.01, rely=0.05, )
try:
img_file_name = 'Panel_geometry_definitions.png'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = os.path.dirname(os.path.abspath(__file__)) + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
label = tk.Label(self._tab_help, image=photo)
label.image = photo # keep a reference!
label.place(relx = 0.01, rely = 0.1)
except TclError:
pass
ttk.Label(self._tab_help, text='Buckling parameters, cylinders',font="Text 10 bold", )\
.place(relx=0.01, rely=0.55)
try:
img_file_name = 'Buckling_Strength_of_Shells.png'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = os.path.dirname(os.path.abspath(__file__)) + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
label = tk.Label(self._tab_help, image=photo)
label.image = photo # keep a reference!
label.place(relx = 0.01, rely = 0.6)
except TclError:
pass
# --- point input/output ----
self._new_point_x = tk.DoubleVar()
self._new_point_y = tk.DoubleVar()
self._new_point_fix = tk.StringVar()
self._new_zstar_optimization = tk.BooleanVar()
self._new_zstar_optimization.set(True)
ent_width = 6 # width of entries
self._project_information = tk.Text(self._tab_geo, wrap = tk.WORD, relief = tk.FLAT)
self._project_information.place(relx=0.005, rely=0.005, relwidth = 0.95, relheight = 0.1)
self._project_information.insert(1.0, 'No information on project provided. Input here.')
ttk.Label(self._tab_geo, text='Input point coordinates [mm]', font=self._text_size['Text 9 bold'],
)\
.place(rely=point_start, relx=point_x_start, anchor = tk.NW)
ttk.Label(self._tab_geo, text='Point x (horizontal) [mm]:',font="Text 9", )\
.place(relx=point_x_start, rely=point_start+ delta_y,)
ttk.Label(self._tab_geo, text='Point y (vertical) [mm]:',font="Text 9", )\
.place(relx=point_x_start, rely=point_start + delta_y*2)
ttk.Entry(self._tab_geo, textvariable=self._new_point_x, width = int(ent_width * 1.5))\
.place(relx=ent_x, rely=point_start+ delta_y)
ttk.Entry(self._tab_geo, textvariable=self._new_point_y, width = int(ent_width * 1.5))\
.place(relx=ent_x, rely=point_start + delta_y*2)
ttk.Button(self._tab_geo, text='Add point (coords)', command=self.new_point,style = "Bold.TButton")\
.place(relx=ent_x + 2 * delta_x, rely=point_start+1*delta_y, relwidth = 0.3)
ttk.Button(self._tab_geo, text='Copy point (relative)', command=self.copy_point,style = "Bold.TButton")\
.place(relx=ent_x + 2 * delta_x, rely=point_start+2*delta_y,relwidth = 0.3)
ttk.Button(self._tab_geo, text='Move point', command=self.move_point,style = "Bold.TButton")\
.place(relx=ent_x + 2 * delta_x, rely=point_start+3*delta_y, relwidth = 0.3)
ttk.Button(self._tab_geo, text='Move line', command=self.move_line,style = "Bold.TButton")\
.place(relx=ent_x + 2 * delta_x,rely=point_start+4*delta_y, relwidth = 0.3)
self._new_draw_point_name = tk.BooleanVar()
self._new_draw_point_name.set(False)
ttk.Label(self._tab_geo, text='Show point names in GUI', font="Text 9")\
.place(relx=point_x_start, rely=point_start+3*delta_y)
ttk.Checkbutton(self._tab_geo, variable = self._new_draw_point_name, command = self.on_color_code_check)\
.place(relx=ent_x, rely=point_start+3*delta_y)
self._new_line_name = tk.BooleanVar()
self._new_line_name.set(False)
line_start, line_x = point_start+0.2, 0.0055
ttk.Label(self._tab_geo, text='Input line from "point number" to "point number"',
font=self._text_size['Text 9 bold'], )\
.place(rely=line_start, relx=line_x, anchor = tk.NW)
ttk.Label(self._tab_geo, text='Line from point number:',font="Text 9", )\
.place(relx=line_x, rely=line_start+delta_y)
ttk.Label(self._tab_geo, text='Line to point number:',font="Text 9", )\
.place(relx=line_x, rely=line_start+2*delta_y)
ttk.Label(self._tab_geo, text='Show line names in GUI', font="Text 9").place(relx=point_x_start,
rely=line_start+3*delta_y)
ttk.Checkbutton(self._tab_geo, variable=self._new_line_name,
command=self.on_color_code_check).place(relx=ent_x, rely=line_start+3*delta_y)
# --- line input/output ---
self._new_line_p1 = tk.IntVar()
self._new_line_p2 = tk.IntVar()
# Check boxes
self._new_shortcut_backdrop = tk.BooleanVar()
self._new_shortcut_backdrop.set(False)
self._new_colorcode_beams = tk.BooleanVar()
self._new_colorcode_beams.set(False)
self._new_colorcode_plates = tk.BooleanVar()
self._new_colorcode_plates.set(False)
self._new_colorcode_pressure = tk.BooleanVar()
        self._new_colorcode_pressure.set(False)
self._new_colorcode_utilization = tk.BooleanVar()
self._new_colorcode_utilization.set(False)
self._new_label_color_coding = tk.BooleanVar()
self._new_label_color_coding.set(False)
self._new_colorcode_sigmax = tk.BooleanVar()
self._new_colorcode_sigmax.set(False)
self._new_colorcode_sigmay1 = tk.BooleanVar()
self._new_colorcode_sigmay1.set(False)
self._new_colorcode_sigmay2 = tk.BooleanVar()
self._new_colorcode_sigmay2.set(False)
self._new_colorcode_tauxy = tk.BooleanVar()
self._new_colorcode_tauxy.set(False)
self._new_colorcode_structure_type = tk.BooleanVar()
self._new_colorcode_structure_type.set(False)
self._new_colorcode_section_modulus = tk.BooleanVar()
self._new_colorcode_section_modulus.set(False)
self._new_colorcode_fatigue = tk.BooleanVar()
self._new_colorcode_fatigue.set(False)
self._new_colorcode_puls_sp_or_up= tk.BooleanVar()
self._new_colorcode_puls_sp_or_up.set(False)
self._new_colorcode_puls_acceptance= tk.BooleanVar()
self._new_colorcode_puls_acceptance.set(False)
self._new_colorcode_total= tk.BooleanVar()
self._new_colorcode_total.set(False)
self._new_colorcode_spacing= tk.BooleanVar()
self._new_colorcode_spacing.set(False)
self._new_toggle_var = tk.StringVar()
self._new_toggle_select_multiple = tk.BooleanVar()
self._new_toggle_puls = tk.BooleanVar()
self._new_toggle_puls.set(False)
self._new_puls_uf = tk.DoubleVar()
self._new_puls_uf.set(0.87)
self._new_scale_stresses = tk.BooleanVar()
self._new_scale_stresses.set(False)
self._new_fup = tk.DoubleVar()
self._new_fup.set(0.5)
self._new_fdwn = tk.DoubleVar()
self._new_fdwn.set(1)
self._new_shifted_coords = tk.BooleanVar()
self._new_shifted_coords.set(False)
self._new_show_cog = tk.BooleanVar()
self._new_show_cog.set(False)
self._new_content_type = tk.StringVar()
self._new_content_type.set('ballast')
self._new_panel_or_shell = tk.StringVar()
self._new_panel_or_shell.set('panel')
self._new_shift_viz_coord_ver = tk.DoubleVar()
self._new_shift_viz_coord_ver.set(0)
self._new_shift_viz_coord_hor = tk.DoubleVar()
self._new_shift_viz_coord_hor.set(0)
line_start, line_x = point_start+0.2, 0.0055
ttk.Spinbox(self._tab_geo, textvariable=self._new_line_p1, width=int(ent_width * 1), from_ = 0,
to = float('inf')).place(relx=ent_x, rely=line_start+1*delta_y)
ttk.Spinbox(self._tab_geo, textvariable=self._new_line_p2, width=int(ent_width * 1),
from_ = 0, to = float('inf')).place(relx=ent_x, rely=line_start+2*delta_y)
ttk.Button(self._tab_geo, text='Add line', command=self.new_line,style = "Bold.TButton")\
.place(relx=ent_x + 2 * delta_x, rely=line_start+delta_y, relwidth = 0.3)
# --- delete points and lines ---
self._new_delete_line = tk.IntVar()
self._new_delete_point = tk.IntVar()
del_start, del_x = line_start + 0.2,0.005208333
ttk.Label(self._tab_geo, text='Delete lines and points (or left/right click and use "Delete key")',
font=self._text_size['Text 9 bold'], )\
.place(rely=del_start - 0.02,relx=del_x, anchor = tk.NW)
self._ent_delete_line = ttk.Spinbox(self._tab_geo, textvariable=self._new_delete_line,
from_ = 0, to = float('inf'),
width=int(ent_width * 1))
self._ent_delete_line.place(relx=ent_x, rely=del_start + delta_y)
self._ent_delete_point = ttk.Spinbox(self._tab_geo, textvariable=self._new_delete_point,
from_ = 0, to = float('inf'),
width=int(ent_width * 1))
self._ent_delete_point.place(relx=ent_x, rely=del_start + delta_y*2)
ttk.Label(self._tab_geo, text='Line number (left click):',font="Text 9")\
.place(relx=del_x, rely=del_start+ delta_y)
ttk.Label(self._tab_geo, text='Point number (right click):',font="Text 9", )\
.place(relx=del_x, rely=del_start+ delta_y*2)
ttk.Button(self._tab_geo, text='Delete line',command=self.delete_line,style = "Bold.TButton"
).place(relx=ent_x+delta_x*2, rely=del_start + delta_y,
relwidth = 0.3)
ttk.Button(self._tab_geo, text='Delete prop.',command=self.delete_properties_pressed,style = "Bold.TButton"
).place(relx=ent_x+delta_x*2, rely=del_start + delta_y*2,
relwidth = 0.3)
ttk.Button(self._tab_geo, text='Delete point',command=self.delete_point,style = "Bold.TButton"
).place(relx=ent_x+2*delta_x, rely=del_start + delta_y*3,
relwidth = 0.3)
# Shifing of coordinate display
shift_x = del_x
shift_y = del_start+0.2
ttk.Label(self._tab_geo, text='Shift coordinate labeling [mm]: ', font = self._text_size['Text 8 bold'])\
.place(relx=shift_x, rely=shift_y - delta_y*0.5)
        ttk.Label(self._tab_geo, text='Used if you want a different origin of the reported coordinates. \n'
'Does not affect loads.', font = self._text_size['Text 8'])\
.place(relx=shift_x, rely=shift_y + delta_y*0.5)
ttk.Label(self._tab_geo, text='y shift', font = self._text_size['Text 8'],
).place(relx=shift_x, rely=shift_y + delta_y * 2)
ttk.Label(self._tab_geo, text='x shift ', font = self._text_size['Text 8'],
).place(relx=shift_x, rely=shift_y + delta_y * 3)
self._ent_shift_hor = ttk.Entry(self._tab_geo, textvariable = self._new_shift_viz_coord_hor,
width = ent_width )
self._ent_shift_hor.bind('<FocusOut>', self.trace_shift_change)
self._ent_shift_ver = ttk.Entry(self._tab_geo, textvariable = self._new_shift_viz_coord_ver,
width = ent_width,
)
self._ent_shift_ver.bind('<FocusOut>', self.trace_shift_change)
#self._ent_shift_ver.trace('w', self.trace_shift_change)
self._ent_shift_hor.place(relx=ent_x, rely=shift_y + delta_y * 2)
self._ent_shift_ver.place(relx=ent_x, rely=shift_y + delta_y * 3)
ttk.Label(self._tab_geo, text='Use shifted coordinates', font="Text 9")\
.place(relx=shift_x, rely=shift_y + delta_y * 4)
ttk.Checkbutton(self._tab_geo, variable = self._new_shifted_coords, command = self.update_frame)\
.place(relx=ent_x, rely=shift_y + delta_y * 4)
# --- structure type information ---
def show_message():
messagebox.showinfo(title='Structure type',message='Types - sets default stresses (sigy1/sigy2/sigx/tauxy)'
'\n FOR DYNAMIC EQUATION THE FOLLOWING APPLIES'
'\n X (horizontal) used for BOTTOM, BBT, HOPPER, MD'
'\n Y (vertical) used for BBS, SIDE_SHELL, SSS'
'\n'
'\n Bottom (100/100/50/5) : BOTTOM '
'\n Bilge box side (70/70/30/3) : BBS '
'\n Bilge box top (80/80/30/3) : BBT '
'\n Hopper plate(70/70/50/3) : HOPPER'
'\n Side shell (100/100/40/3): SIDE_SHELL'
'\n Inner side (80/80/40/5): INNER_SIDE '
                                                '\n Non WT frame (70/70/60/10): FRAME '
                                                '\n WT frame (70/70/60/10): FRAME_WT '
'\n Internal BHD WT (70/70/50/10): INT_BHD'
'\n Main deck (70/70/40/3) : MD '
'\n General (WT) (90/90/40/5): '
'GENERAL_INTERNAL_WT'
'\n General (NONWT) (70/70/30/3): '
'GENERAL_INTERNAL_NONWT'
'\n Side shell slamming (100/100/50/20): SSS '
'\n Internal 1 MPa wt (1/1/1/1): INTERNAL_1_MPA '
'\n Internal low stress wt (40/40/20/5): '
'INTERNAL_LOW_STRESS_WT ')
vert_start = 0.1
hor_start = 0.02
# Toggle buttons
ttk.Label(self._tab_prop_tools, text='Change one property for multiple lines here. \n'
                                              '1. Press multiple select button\n'
'2. Select parameter in option menu\n'
'3. Press Change parameters button', font=self._text_size['Text 9'])\
.place(relx = hor_start, rely=vert_start-1*delta_y)
self._toggle_btn = tk.Button(self._tab_prop_tools, text="Toggle select\nmultiple", relief="raised",
command=self.toggle_select_multiple, bg = '#E1E1E1', activebackground = '#E5F1FB' )
self._toggle_change_param = ttk.Button(self._tab_prop_tools, text="Change parameters",
command=self.toggle_set_variable)
self._toggle_param_to_change = None
self._toggle_btn.place(relx=hor_start, rely=vert_start+2*delta_y, relwidth = 0.2, relheight = 0.06)
self._toggle_change_param.place(relx=hor_start+ delta_x*6, rely=vert_start+2*delta_y, relwidth = 0.25)
self._toggle_choose = ttk.OptionMenu(self._tab_prop_tools, self._new_toggle_var,self._stuctural_definition[0],
*self._stuctural_definition,
command = self.update_frame)
self._toggle_choose.place(relx=hor_start+ delta_x*3, rely=vert_start+2*delta_y, relwidth = 0.25)
ttk.Label(self._tab_prop_tools, text='Scale stresses when changing properties', font=self._text_size['Text 9'])\
.place(relx = hor_start + delta_x*1, rely=vert_start+6*delta_y)
ttk.Checkbutton(self._tab_prop_tools, variable = self._new_scale_stresses, command = self.on_color_code_check)\
.place(relx = hor_start + delta_x*0, rely=vert_start+6*delta_y)
ttk.Label(self._tab_prop_tools, text='Factor when scaling stresses up, fup', font=self._text_size['Text 8']).place(relx =hor_start + delta_x,
rely=vert_start+7*delta_y)
ttk.Label(self._tab_prop_tools, text='Factor when scaling stresses down, fdown', font=self._text_size['Text 8']).place(relx =hor_start + delta_x,
rely=vert_start+8*delta_y)
ent_fup = ttk.Entry(self._tab_prop_tools, textvariable=self._new_fup)
ent_fup.place(relx =hor_start, rely=vert_start+7*delta_y, relwidth = 0.1)
ent_fdwn = ttk.Entry(self._tab_prop_tools, textvariable=self._new_fdwn)
ent_fdwn.place(relx =hor_start, rely=vert_start+8*delta_y, relwidth = 0.1)
# --- main variable to define the structural properties ---
self._new_material = tk.DoubleVar()
self._new_material_factor = tk.DoubleVar()
self._new_field_len = tk.DoubleVar()
self._new_stf_spacing = tk.DoubleVar()
self._new_plate_thk = tk.DoubleVar()
self._new_stf_web_h = tk.DoubleVar()
self._new_stf_web_t = tk.DoubleVar()
self._new_stf_fl_w = tk.DoubleVar()
self._new_stf_fl_t = tk.DoubleVar()
self._new_stucture_type = tk.StringVar()
self._new_stucture_type_label = tk.StringVar()
self._new_sigma_y1 = tk.DoubleVar()
self._new_sigma_y2 = tk.DoubleVar()
self._new_sigma_x1 = tk.DoubleVar()
self._new_sigma_x2 = tk.DoubleVar()
self._new_tauxy = tk.DoubleVar()
self._new_stf_km1 = tk.DoubleVar()
self._new_stf_km2 = tk.DoubleVar()
self._new_stf_km3 = tk.DoubleVar()
self._new_stf_kps = tk.DoubleVar()
self._new_plate_kpp = tk.DoubleVar()
self._new_stf_type = tk.StringVar()
self._new_girder_web_h = tk.DoubleVar()
self._new_girder_web_t = tk.DoubleVar()
self._new_girder_fl_w = tk.DoubleVar()
self._new_girder_fl_t = tk.DoubleVar()
self._new_girder_type = tk.StringVar()
self._new_girder_length_LG = tk.DoubleVar()
self._new_panel_length_Lp = tk.DoubleVar()
self._new_pressure_side = tk.StringVar()
self._new_puls_method = tk.StringVar()
self._new_puls_panel_boundary = tk.StringVar()
self._new_puls_sp_or_up = tk.StringVar()
self._new_puls_up_boundary = tk.StringVar()
self._new_buckling_min_press_adj_spans = tk.DoubleVar()
self._new_buckling_lf_stresses = tk.DoubleVar()
self._new_buckling_stf_end_support = tk.StringVar()
self._new_buckling_girder_end_support = tk.StringVar()
self._new_buckling_tension_field = tk.StringVar()
self._new_buckling_effective_against_sigy = tk.StringVar()
self._new_buckling_length_factor_stf = tk.DoubleVar()
self._new_buckling_length_factor_girder = tk.DoubleVar()
self._new_buckling_km3 = tk.DoubleVar()
self._new_buckling_km2 = tk.DoubleVar()
self._new_buckling_stf_dist_bet_lat_supp = tk.DoubleVar()
self._new_buckling_girder_dist_bet_lat_supp = tk.DoubleVar()
self._new_buckling_fab_method_stf = tk.StringVar()
self._new_buckling_fab_method_girder = tk.StringVar()
self._new_buckling_lf_stresses.set(1)
self._new_buckling_stf_end_support.set('Continuous')
self._new_buckling_girder_end_support.set('Continuous')
self._new_buckling_tension_field.set('not allowed')
self._new_buckling_effective_against_sigy.set("Stf. pl. effective against sigma y")
self._new_buckling_km3.set(12)
self._new_buckling_km2.set(24)
self._new_buckling_fab_method_stf.set('welded')
self._new_buckling_fab_method_girder.set('welded')
# Setting default values to tkinter variables
self._new_material.set(355)
self._new_field_len.set(4000)
self._new_stf_spacing.set(750)
self._new_plate_thk.set(18)
self._new_stf_web_h.set(400)
self._new_stf_web_t.set(12)
self._new_stf_fl_w.set(150)
self._new_stf_fl_t.set(20)
self._new_girder_web_h.set(800)
self._new_girder_web_t.set(20)
self._new_girder_fl_w.set(200)
self._new_girder_fl_t.set(30)
self._new_girder_length_LG.set(10000)
self._new_panel_length_Lp.set(0)
self._new_sigma_y1.set(80)
self._new_sigma_y2.set(80)
self._new_sigma_x1.set(50)
self._new_sigma_x2.set(50)
self._new_stf_km1.set(12)
self._new_stf_km2.set(24)
self._new_stf_km3.set(12)
self._new_stf_kps.set(1)
self._new_plate_kpp.set(1)
self._new_material_factor.set(1.15)
self._new_overpresure = tk.DoubleVar()
self._new_overpresure.set(25000)
self._new_density = tk.DoubleVar()
self._new_density.set(1025)
self._new_max_el = tk.DoubleVar()
self._new_min_el = tk.DoubleVar()
self._new_calculation_domain = tk.StringVar()
self._new_stucture_type.set('GENERAL_INTERNAL_WT')
self.option_meny_structure_type_trace(event='GENERAL_INTERNAL_WT')
self._new_stf_type.set('T')
self._new_pressure_side.set('both sides')
self._new_puls_method.set('ultimate')
self._new_puls_panel_boundary.set('Int')
self._new_puls_sp_or_up.set('SP')
self._new_puls_up_boundary.set('SSSS')
#self._new_calculation_domain.set('Stiffened panel, flat')
self._new_calculation_domain.set('Flat plate, stiffened')
#self._new_material_factor.trace('w', self.trace_material_factor)
# --- main entries and labels to define the structural properties ---
ent_width = 12 #width of entries
'''
Flat plate input
'''
self._flat_gui_headlines = [ttk.Label(self._tab_prop, text='Plate input',
font = self._text_size['Text 8 bold']),
ttk.Label(self._tab_prop, text='Stiffener',
font = self._text_size['Text 8 bold']),
ttk.Label(self._tab_prop, text='Girder',
font = self._text_size['Text 8 bold']),
ttk.Label(self._tab_prop, text='Load/stresses input',
font = self._text_size['Text 8 bold']),
                                    ttk.Label(self._tab_prop, text='Special provisions input',
font = self._text_size['Text 8 bold']),
ttk.Label(self._tab_prop, text='Buckling input',
font = self._text_size['Text 8 bold']),
ttk.Label(self._tab_prop, text='Stiffener',
font=self._text_size['Text 8 bold']),
ttk.Label(self._tab_prop, text='Girder',
font=self._text_size['Text 8 bold']),
]
self._ent_field_len = ttk.Entry(self._tab_prop, textvariable=self._new_field_len, width = int(10))
self._ent_stf_spacing = ttk.Entry(self._tab_prop, textvariable=self._new_stf_spacing, width = int(10))
self._ent_plate_thk = ttk.Entry(self._tab_prop, textvariable=self._new_plate_thk, width = int(10))
self._ent_girder_length = ttk.Entry(self._tab_prop, textvariable=self._new_girder_length_LG, width = int(10))
self._ent_panel_length = ttk.Entry(self._tab_prop, textvariable=self._new_panel_length_Lp, width = int(10))
self._lab_span = ttk.Label(self._tab_prop, text='Stiffener/plate length', )
self._lab_s = ttk.Label(self._tab_prop, text='Stiffener spacing/plate width', )
self._lab_pl_thk = ttk.Label(self._tab_prop, text='Plate thickness', )
self._lab_girder_length_LG = ttk.Label(self._tab_prop, text='Girder length, LG')
self._lab_gpanel_length_Lp = ttk.Label(self._tab_prop, text='Panel length, Lp')
self._flat_gui_plate = [self._ent_field_len, self._ent_stf_spacing, self._ent_plate_thk,
self._ent_girder_length, self._ent_panel_length]
self._flat_gui_lab_plate = [self._lab_span, self._lab_s, self._lab_pl_thk, self._lab_girder_length_LG,
self._lab_gpanel_length_Lp]
self._btn_flat_stf_section = ttk.Button(self._tab_prop, text='Stiffener',
command= lambda id= "flat stf": self.on_open_structure_window(id))
self._ent_stf_type = ttk.OptionMenu(self._tab_prop, self._new_stf_type, 'T', *['T', 'FB', 'L', 'L-bulb'])
self._ent_stf_web_h = ttk.Entry(self._tab_prop, textvariable=self._new_stf_web_h, width = int(10))
self._ent_stf_web_t = ttk.Entry(self._tab_prop, textvariable=self._new_stf_web_t, width = int(10))
self._ent_stf_fl_w = ttk.Entry(self._tab_prop, textvariable=self._new_stf_fl_w, width = int(10))
self._ent_str_fl_t = ttk.Entry(self._tab_prop, textvariable=self._new_stf_fl_t, width = int(10))
self._lab_stf_section = ttk.Label(self._tab_prop, text='')
self._lab_stf_type = ttk.Label(self._tab_prop, text='Stiffener/girder type')
self._lab_web_h = ttk.Label(self._tab_prop, text='Web height, hw', )
self._lab_web_thk = ttk.Label(self._tab_prop, text='Web thickness, tw', )
self._lab_fl_w= ttk.Label(self._tab_prop, text='Flange width, b', )
        self._lab_fl_thk = ttk.Label(self._tab_prop, text='Flange thickness, tf', )
self._flat_gui_stf = [self._btn_flat_stf_section, self._ent_stf_type, self._ent_stf_web_h, self._ent_stf_web_t,
self._ent_stf_fl_w,self._ent_str_fl_t]
self._flat_gui_lab_stf = [self._lab_stf_section,self._lab_stf_type,self._lab_web_h,self._lab_web_thk,
self._lab_fl_w, self._lab_fl_thk]
self._btn_flat_girder_section = ttk.Button(self._tab_prop, text='Girder',
command= lambda id= "flat girder": self.on_open_structure_window(id))
self._ent_girder_type = ttk.OptionMenu(self._tab_prop, self._new_girder_type, 'T', *['T', 'FB', 'L', 'L-bulb'])
self._ent_girder_web_h = ttk.Entry(self._tab_prop, textvariable=self._new_girder_web_h, width = int(10))
self._ent_girder_web_t = ttk.Entry(self._tab_prop, textvariable=self._new_girder_web_t, width = int(10))
self._ent_girder_fl_w = ttk.Entry(self._tab_prop, textvariable=self._new_girder_fl_w, width = int(10))
self._ent_girder_fl_t = ttk.Entry(self._tab_prop, textvariable=self._new_girder_fl_t, width = int(10))
self._flat_gui_girder = [self._btn_flat_girder_section, self._ent_girder_type, self._ent_girder_web_h,
self._ent_girder_web_t,
self._ent_girder_fl_w,self._ent_girder_fl_t]
self._ent_plate_kpp = ttk.Entry(self._tab_prop, textvariable=self._new_plate_kpp, width = int(5*1))
self._ent_plate_kps = ttk.Entry(self._tab_prop, textvariable=self._new_stf_kps, width = int(5*1))
self._ent_stf_km1 = ttk.Entry(self._tab_prop, textvariable=self._new_stf_km1, width = int(5*1))
self._ent_stf_km2 = ttk.Entry(self._tab_prop, textvariable=self._new_stf_km2,width = int(5*1))
self._ent_stf_km3 = ttk.Entry(self._tab_prop, textvariable=self._new_stf_km3, width = int(5*1))
self._lab_kpp = ttk.Label(self._tab_prop,text='kpp', )
self._lab_kps = ttk.Label(self._tab_prop, text='kps', )
self._lab_km1 = ttk.Label(self._tab_prop, text='km1', )
self._lab_km2 = ttk.Label(self._tab_prop, text='km2', )
self._lab_km3 = ttk.Label(self._tab_prop, text='km3', )
self._flat_gui_os_c101_provisions = [self._ent_plate_kpp, self._ent_plate_kps, self._ent_stf_km1,
self._ent_stf_km2, self._ent_stf_km3]
self._flat_gui_lab_os_c101_provisions = [self._lab_kpp, self._lab_kps, self._lab_km1,self._lab_km2,
self._lab_km3]
self._ent_pressure_side = ttk.OptionMenu(self._tab_prop,self._new_pressure_side,('both sides','plate side',
'stiffener side')[0],
*('both sides','plate side','stiffener side'))
self._ent_sigma_y1= ttk.Entry(self._tab_prop, textvariable=self._new_sigma_y1, width = int(10))
self._ent_sigma_y2 = ttk.Entry(self._tab_prop, textvariable=self._new_sigma_y2, width=int(10))
self._ent_sigma_x1 = ttk.Entry(self._tab_prop, textvariable=self._new_sigma_x1, width=int(10))
self._ent_sigma_x2 = ttk.Entry(self._tab_prop, textvariable=self._new_sigma_x2, width=int(10))
self._ent_tauxy = ttk.Entry(self._tab_prop, textvariable=self._new_tauxy, width=int(10))
self._ent_mat = ttk.Entry(self._tab_prop, textvariable=self._new_material, width = int(10))
self._ent_mat_factor = ttk.Entry(self._tab_prop, textvariable=self._new_material_factor, width = int(10))
self._ent_structure_type = ttk.OptionMenu(self._tab_prop, self._new_stucture_type, self._options_type[0],
*self._options_type,command = self.option_meny_structure_type_trace)
self._lab_press_side = ttk.Label(self._tab_prop, text='Overpressure side')
self._lab_sig_x1 = ttk.Label(self._tab_prop, text='Axial stress 1,sig_x1')
self._lab_sig_x2 = ttk.Label(self._tab_prop, text='Axial stress 2,sig_x2')
self._lab_sig_y1 = ttk.Label(self._tab_prop, text='Trans. stress 1,sig_y1')
self._lab_sig_y2 = ttk.Label(self._tab_prop, text='Trans. stress 2,sig_y2')
        self._lab_tau_y1 = ttk.Label(self._tab_prop, text='Shear stress, tau_xy')
self._lab_yield = ttk.Label(self._tab_prop, text='Material yield stress [MPa]:', font = self._text_size['Text 9'])
self._lab_mat_fac = ttk.Label(self._tab_prop, text='Mat. factor', font = self._text_size['Text 9'])
self._lab_structure_type = ttk.Label(self._tab_prop, text='Select structure type:',
font=self._text_size['Text 9'])
self._flat_gui_lab_loads = [self._lab_press_side , self._lab_sig_x1,
self._lab_sig_x2, self._lab_sig_y1, self._lab_sig_y2,
self._lab_tau_y1, self._lab_yield, self._lab_mat_fac,
self._lab_structure_type]
self._flat_gui_loads = [self._ent_pressure_side, self._ent_sigma_x1,
self._ent_sigma_x2, self._ent_sigma_y1, self._ent_sigma_y2, self._ent_tauxy,
self._ent_mat, self._ent_mat_factor,
self._ent_structure_type]
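# Buckling method selection: prescriptive DNV-RP-C201, DNV PULS or the ML classifier based on PULS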
self._new_buckling_method = tk.StringVar()
options = ['DNV-RP-C201 - prescriptive','DNV PULS','ML-CL (PULS based)']
self._lab_buckling_method = ttk.Label(self._tab_prop, text='Set buckling method')
self._buckling_method = ttk.OptionMenu(self._tab_prop, self._new_buckling_method, options[0], *options,
command=self.update_frame)
# PULS interface
self._puls_run_all = ttk.Button(self._tab_prop, text='Run PULS -\nupdate results',
command=self.puls_run_all_lines)
self._ent_puls_uf = ttk.Entry(self._tab_prop, textvariable=self._new_puls_uf, width=int(ent_width * 1))
self._new_puls_uf.trace('w', self.trace_acceptance_change)
self._ent_puls_sp_or_up= ttk.OptionMenu(self._tab_prop, self._new_puls_sp_or_up, 'SP', *['SP', 'UP'],
command = self.trace_puls_up_or_sp,)
self._ent_puls_method = ttk.OptionMenu(self._tab_prop, self._new_puls_method,'buckling',
*['buckling', 'ultimate'])
self._ent_puls_panel_boundary = ttk.OptionMenu(self._tab_prop, self._new_puls_panel_boundary,'Int',
*['Int', 'GL', 'GT'])
#self._ent_puls_stf_end_type = ttk.OptionMenu(self._tab_prop, self._new_buckling_stf_end_support,'C',*['C', 'S'])
self._ent_puls_stf_end_type = ttk.OptionMenu(self._tab_prop, self._new_buckling_stf_end_support, 'Continuous',
*['Continuous', 'Sniped'])
self._ent_puls_up_boundary = ttk.Entry(self._tab_prop, textvariable=self._new_puls_up_boundary, width=int(7*1))
self._zstar_chk = ttk.Checkbutton(self._tab_prop, variable=self._new_zstar_optimization)
self._lab_puls_input = ttk.Label(self._tab_prop, text='Buckling parameters input',
font=self._text_size['Text 8 bold'])
self._flat_gui_buc_lab_stf_girder = [ttk.Label(self._tab_prop, text='End support'),
ttk.Label(self._tab_prop, text='Fabrication method'),
ttk.Label(self._tab_prop, text='Buckling length factor'),
ttk.Label(self._tab_prop, text='Distance between lateral support'),
ttk.Label(self._tab_prop, text='Tension field action:')]
self._flat_gui_buc_stf_opt = [ttk.OptionMenu(self._tab_prop, self._new_buckling_stf_end_support,
'Continuous', *['Continuous', 'Sniped']),
ttk.OptionMenu(self._tab_prop, self._new_buckling_fab_method_stf,
'welded', *['welded', 'rolled']),
ttk.Entry(self._tab_prop, textvariable=self._new_buckling_length_factor_stf,
width=int(ent_width * 1)),
ttk.Entry(self._tab_prop, textvariable=self._new_buckling_stf_dist_bet_lat_supp
,width=int(ent_width * 1)),
ttk.OptionMenu(self._tab_prop, self._new_buckling_tension_field,
'not allowed', *['allowed', 'not allowed'])]
self._flat_gui_buc_girder_opt = [ttk.OptionMenu(self._tab_prop, self._new_buckling_girder_end_support,
'Continuous', *['Continuous', 'Sniped']),
ttk.OptionMenu(self._tab_prop, self._new_buckling_fab_method_girder,
'welded',*['welded', 'rolled']),
ttk.Entry(self._tab_prop, textvariable=self._new_buckling_length_factor_girder
,width=int(ent_width * 1)),
ttk.Entry(self._tab_prop,
textvariable=self._new_buckling_girder_dist_bet_lat_supp,
width=int(ent_width * 1)),
ttk.OptionMenu(self._tab_prop, self._new_buckling_effective_against_sigy,
'Stf. pl. effective against sigma y',
*['Stf. pl. effective against sigma y',
'All sigma y to girder'])]
self._flat_gui_girder_moment_factor = [ ttk.Label(self._tab_prop, text='Girder moment factor at support/midspan'),
ttk.Entry(self._tab_prop, textvariable=self._new_buckling_km3,
width=int(ent_width * 1)),
ttk.Entry(self._tab_prop, textvariable=self._new_buckling_km2,
width=int(ent_width * 1))]
self._flat_gui_buc_lab_common = [ttk.Label(self._tab_prop, text='Minimum pressure in adjacent spans'),
ttk.Label(self._tab_prop, text='Load factor on stresses')]
self._flat_gui_buc_common_opt = [ttk.Entry(self._tab_prop, textvariable=self._new_buckling_min_press_adj_spans,
width=int(ent_width * 1)),
ttk.Entry(self._tab_prop, textvariable=self._new_buckling_lf_stresses,
width=int(ent_width * 1))]
self._lab_puls_acceptance= ttk.Label(self._tab_prop, text='PULS acceptance')
self._lab_puls_uf = ttk.Label(self._tab_prop, text='PULS utilization factor:')
self._lab_puls_int_gt = ttk.Label(self._tab_prop, text='PULS Int-integrated GL-free left/right GT-free top/bottom')
self._lab_puls_cont_sniped = ttk.Label(self._tab_prop, text='Continuous or Sniped',
font = self._text_size['Text 8'])
self._lab_puls_up_supp = ttk.Label(self._tab_prop, text='PULS UP support - left,right,upper,lower\n'
'S: simply supported C: Continuous',
font = self._text_size['Text 8'])
# self._zstar_label = ttk.Label(self._tab_prop, text='z* optimization (buckling RP-C201)',
# font=self._text_size['Text 8'])
self._flat_gui_buckling = [self._ent_puls_method, self._ent_puls_uf,
self._ent_puls_panel_boundary,
self._ent_puls_up_boundary]#, self._zstar_chk]
self._flat_gui_lab_buckling = [self._lab_puls_acceptance, self._lab_puls_uf,
self._lab_puls_int_gt,
self._lab_puls_up_supp]#, self._zstar_label]
self._button_str_type = ttk.Button(self._tab_prop, text='Show structure types', command=show_message)
self._structure_types_label = ttk.Label(textvariable = self._new_stucture_type_label,
font = self._text_size['Text 8'], )
#-------------Color coding-------------------
self._chk_cc_spacing = ttk.Checkbutton(self._tab_information, variable = self._new_colorcode_spacing,
command = self.on_color_code_check)
self._chk_button_sigmax1 = ttk.Checkbutton(self._tab_information, variable = self._new_colorcode_sigmax,
command = self.on_color_code_check)
self._chk_button_sigmax2 = ttk.Checkbutton(self._tab_information, variable = self._new_colorcode_sigmax,
command = self.on_color_code_check)
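# Note: the sigma x1 and sigma x2 checkbuttons above share the same control variable (self._new_colorcode_sigmax)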
self._chk_button_sigmay1 = ttk.Checkbutton(self._tab_information, variable = self._new_colorcode_sigmay1,
command = self.on_color_code_check)
self._chk_button_sigmay2 = ttk.Checkbutton(self._tab_information, variable = self._new_colorcode_sigmay2,
command = self.on_color_code_check)
self._chk_button_tauxy = ttk.Checkbutton(self._tab_information, variable = self._new_colorcode_tauxy,
command = self.on_color_code_check)
self._chk_button_structure_type = ttk.Checkbutton(self._tab_information, variable = self._new_colorcode_structure_type,
command = self.on_color_code_check)
self._chk_button_puls_spup = ttk.Checkbutton(self._tab_information, variable=self._new_colorcode_puls_sp_or_up,
command=self.on_color_code_check)
self._chk_button_puls_acceptance =ttk.Checkbutton(self._tab_information, variable=self._new_colorcode_puls_acceptance,
command=self.on_color_code_check)
chk_deltax = 0.1
chk_deltay = 0.025
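# Relative x/y offsets used when placing the information-tab checkbuttons and their labels below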
self._information_gui_chk_structure = [
ttk.Checkbutton(self._tab_information,
variable = self._new_label_color_coding,
command = self.on_color_code_check),
ttk.Checkbutton(self._tab_information,
variable = self._new_show_cog,
command = self.update_frame),
ttk.Checkbutton(self._tab_information,
variable = self._new_shortcut_backdrop,
command = self.update_frame),
ttk.Checkbutton(self._tab_information,
variable = self._new_colorcode_beams,
command = self.on_color_code_check),
ttk.Checkbutton(self._tab_information,
variable = self._new_colorcode_plates,
command = self.on_color_code_check),
ttk.Checkbutton(self._tab_information,
variable = self._new_colorcode_pressure,
command = self.on_color_code_check),
ttk.Checkbutton(self._tab_information,
variable = self._new_colorcode_utilization,
command = self.on_color_code_check),
ttk.Checkbutton(self._tab_information,
variable = self._new_colorcode_section_modulus,
command = self.on_color_code_check),
ttk.Checkbutton(self._tab_information,
variable = self._new_colorcode_fatigue,
command = self.on_color_code_check),
ttk.Checkbutton(self._tab_information,
variable = self._new_colorcode_total,
command = self.on_color_code_check),
self._chk_cc_spacing, self._chk_button_sigmax1,
self._chk_button_sigmax2,
self._chk_button_sigmay1, self._chk_button_sigmay2,
self._chk_button_tauxy,self._chk_button_structure_type ,
self._chk_button_puls_spup,
self._chk_button_puls_acceptance]
self._information_gui_lab_chk_structure = [
ttk.Label(self._tab_information, text='Label color code', font="Text 9"),
ttk.Label(self._tab_information, text='Show COG/COB', font="Text 9"),
ttk.Label(self._tab_information, text='Check to see available shortcuts', font="Text 9"),
ttk.Label(self._tab_information, text='Beam prop.', font="Text 9"),
ttk.Label(self._tab_information, text='Plate thk.', font="Text 9"),
ttk.Label(self._tab_information, text='Pressure', font="Text 9"),
ttk.Label(self._tab_information, text='Buckling UF', font="Text 9"),
ttk.Label(self._tab_information, text='Sec. mod. UF', font="Text 9"),
ttk.Label(self._tab_information, text='Fatigue UF', font="Text 9"),
ttk.Label(self._tab_information, text='Total UF', font="Text 9"),
ttk.Label(self._tab_information, text='Stiffener spacing'),
ttk.Label(self._tab_information, text='Stresses, sigma x1'),
ttk.Label(self._tab_information, text='Stresses, sigma x2'),
ttk.Label(self._tab_information, text='Stresses, sigma y1'),
ttk.Label(self._tab_information, text='Stresses, sigma y2'),
ttk.Label(self._tab_information, text='Stresses, sigma tauxy'),
ttk.Label(self._tab_information, text='Structure type'),
ttk.Label(self._tab_information, text='Buckling - SP or UP'),
ttk.Label(self._tab_information, text='Buckling acceptance criteria')]
idx = 2
for lab, ent in zip(self._information_gui_chk_structure, self._information_gui_lab_chk_structure):
lab.place(relx=0.02, rely=idx*chk_deltay)
ent.place(relx=0.02 + chk_deltax, rely=idx*chk_deltay)
idx += 1
try:
img_file_name = 'img_stf_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._stf_button = tk.Button(self._tab_prop, image=photo,
command= self.on_open_structure_window)
self._stf_button.image = photo
except TclError:
self._stf_button = tk.Button(self._tab_prop, text='STF.',
command= self.on_open_structure_window,
bg=self._button_bg_color, fg=self._button_fg_color)
try:
img_file_name = 'img_stress_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._stress_button = tk.Button(self._tab_prop, image=photo, command=self.on_open_stresses_window,
fg=self._button_fg_color, bg='white')
self._stress_button.image = photo
except TclError:
self._stress_button = tk.Button(self._tab_prop, text='STRESS', command=self.on_open_stresses_window,
bg=self._button_bg_color, fg=self._button_fg_color)
try:
img_file_name = 'fls_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._fls_button = tk.Button(self._tab_prop, image=photo, command=self.on_open_fatigue_window,
bg=self._button_bg_color)
self._fls_button.image = photo
except TclError:
self._fls_button = tk.Button(self._tab_prop, text='FLS', command=self.on_open_fatigue_window,
bg=self._button_bg_color, fg=self._button_fg_color, )
self.add_stucture = ttk.Button(self._tab_prop, text='Press to add input properties\n'
'to the selected line. Sets all\n'
'basic structural information.', command=self.new_structure,
style = "Bold.TButton")
''' Start shell input '''
'''
Shell input
'''
self._new_shell_thk = tk.DoubleVar()
self._new_shell_radius = tk.DoubleVar()
self._new_shell_dist_rings = tk.DoubleVar()
self._new_shell_length = tk.DoubleVar()
self._new_shell_tot_length= tk.DoubleVar()
self._new_shell_k_factor = tk.DoubleVar()
self._new_shell_yield = tk.DoubleVar()
self._new_shell_mat_factor = tk.DoubleVar()
self._new_shell_poisson = tk.DoubleVar()
self._new_shell_e_module = tk.DoubleVar()
self._new_shell_ring_stf_fab_method = tk.IntVar()
self._new_shell_ring_frame_fab_method = tk.IntVar()
self._new_shell_exclude_ring_stf = tk.BooleanVar()
self._new_shell_exclude_ring_frame = tk.BooleanVar()
self._new_shell_panel_spacing = tk.DoubleVar()
self._new_shell_thk.set(20)
self._new_shell_radius.set(5000)
self._new_shell_dist_rings.set(5000)
self._new_shell_length.set(5000)
self._new_shell_tot_length.set(5000)
self._new_shell_k_factor.set(1)
self._new_shell_yield.set(355)
self._new_shell_mat_factor.set(1.15)
self._new_shell_poisson.set(0.3)
self._new_shell_e_module.set(2.1e11)
self._new_shell_ring_stf_fab_method.set(1)
self._new_shell_ring_frame_fab_method.set(2)
self._new_shell_panel_spacing.set(2000)
self._new_shell_exclude_ring_stf.set(False)
self._new_shell_exclude_ring_frame.set(False)
self._shell_gui_items = list()
self._lab_shell = ttk.Label(self._tab_prop, text='Shell and curved plate input [mm]')
self._ent_shell_plate_thk = ttk.Entry(self._tab_prop, textvariable=self._new_shell_thk)
self._ent_shell_radius = ttk.Entry(self._tab_prop, textvariable=self._new_shell_radius)
self._ent_shell_dist_rings = ttk.Entry(self._tab_prop, textvariable=self._new_shell_dist_rings)
self._ent_shell_length = ttk.Entry(self._tab_prop, textvariable=self._new_shell_length,width = int(5*1))
self._ent_shell_tot_length = ttk.Entry(self._tab_prop, textvariable=self._new_shell_tot_length,
)
self._ent_shell_k_factor= ttk.Entry(self._tab_prop, textvariable=self._new_shell_k_factor,
)
self._ent_shell_material_factor= ttk.Entry(self._tab_prop, textvariable=self._new_shell_mat_factor)
self._shell_gui_items = [self._lab_shell, self._ent_shell_plate_thk, self._ent_shell_radius,
self._ent_shell_dist_rings,
self._ent_shell_length,self._ent_shell_tot_length,self._ent_shell_k_factor,
self._ent_shell_material_factor]
'''
Shell, longitudinal stiffeners
'''
# USING stiffeners for flat plates
self._lab_shell_long_stiffener = ttk.Label(self._tab_prop, text='Longitudinal stiffener properties [mm]',
)
self._btn_shell_stf_section_long_stf = ttk.Button(self._tab_prop, text='STF',
command= lambda id= "long stf": self.on_open_structure_window(id))
self._shell_long_stf_gui_items = [self._lab_shell_long_stiffener ,self._ent_stf_web_h, self._ent_stf_web_t,
self._ent_stf_fl_w, self._ent_str_fl_t, self._ent_stf_spacing,
self._ent_stf_type,self._btn_shell_stf_section_long_stf]
'''
Shell, ring stiffener
'''
self._lab_shell_ring_stiffener = ttk.Label(self._tab_prop, text='Ring stiffener properties [mm]')
self._new_shell_ring_stf_hw = tk.DoubleVar()
self._new_shell_ring_stf_tw = tk.DoubleVar()
self._new_shell_ring_stf_b = tk.DoubleVar()
self._new_shell_ring_stf_tf = tk.DoubleVar()
self._new_shell_ring_stf_tripping_brackets = tk.DoubleVar()
self._new_shell_ring_stf_type = tk.StringVar()
self._new_shell_ring_stf_hw.set(300)
self._new_shell_ring_stf_tw.set(12)
self._new_shell_ring_stf_b.set(120)
self._new_shell_ring_stf_tf.set(20)
self._new_shell_ring_stf_tripping_brackets.set(0)
self._new_shell_ring_stf_type.set('T')
self._ent_shell_ring_stf_hw = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_stf_hw,
width = int(5*1), )
self._ent_shell_ring_stf_tw = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_stf_tw,
)
self._ent_shell_ring_stf_b = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_stf_b,
)
self._ent_shell_ring_stf_tf = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_stf_tf,
)
self._ent_shell_ring_stf_tripping_brackets = ttk.Entry(self._tab_prop,
textvariable=self._new_shell_ring_stf_tripping_brackets,
)
self._ent_shell_ring_stf_type = ttk.OptionMenu(self._tab_prop, self._new_shell_ring_stf_type,'T',
*['T', 'FB', 'L', 'L-bulb'])
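# Exclude-checkbutton for the ring stiffener; note that it is stored on self._chk_shell_ring_frame_exclude, which is re-created for the ring frame further down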
self._chk_shell_ring_frame_exclude = ttk.Checkbutton(self._tab_prop,
variable = self._new_shell_exclude_ring_stf,
command = self.calculation_domain_selected)
self._btn_shell_stf_section_ring_stf = ttk.Button(self._tab_prop,text = 'STF',
command= lambda id= "ring stf":
self.on_open_structure_window(id))
self._shell_ring_stf_gui_items = [self._lab_shell_ring_stiffener,self._ent_shell_ring_stf_hw,
self._ent_shell_ring_stf_tw,self._ent_shell_ring_stf_b,
self._ent_shell_ring_stf_tf, self._ent_shell_ring_stf_tripping_brackets,
self._ent_shell_ring_stf_type, self._chk_shell_ring_frame_exclude,
self._btn_shell_stf_section_ring_stf]
'''
Shell ring girder/frame
'''
self._lab_shell_ring_frame = ttk.Label(self._tab_prop, text='Ring frame/girder properties [mm]',
)
self._new_shell_ring_frame_hw = tk.DoubleVar()
self._new_shell_ring_frame_tw = tk.DoubleVar()
self._new_shell_ring_frame_b = tk.DoubleVar()
self._new_shell_ring_frame_tf = tk.DoubleVar()
self._new_shell_ring_frame_tripping_brackets = tk.DoubleVar()
self._new_shell_ring_frame_l_between_girders = tk.DoubleVar()
self._new_shell_ring_frame_type = tk.StringVar()
self._new_shell_ring_frame_hw.set(300)
self._new_shell_ring_frame_tw.set(12)
self._new_shell_ring_frame_b.set(120)
self._new_shell_ring_frame_tf.set(20)
self._new_shell_ring_frame_tripping_brackets.set(0)
self._new_shell_ring_frame_type.set('T')
self._new_shell_ring_frame_length_between_girders = tk.DoubleVar()
self._new_shell_ring_frame_length_between_girders.set(2500)
self._ent_shell_ring_frame_hw = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_frame_hw,
width=int(5 * 1), )
self._ent_shell_ring_frame_tw = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_frame_tw,
)
self._ent_shell_ring_frame_b = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_frame_b,
)
self._ent_shell_ring_frame_tf = ttk.Entry(self._tab_prop, textvariable=self._new_shell_ring_frame_tf,
)
self._ent_shell_ring_frame_tripping_brackets = ttk.Entry(self._tab_prop,
textvariable=self._new_shell_ring_frame_tripping_brackets,
)
self._ent_shell_ring_frame_l_between_girders = ttk.Entry(self._tab_prop,
textvariable=self._new_shell_ring_frame_length_between_girders,
)
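# Note: the type selector below re-assigns self._ent_shell_ring_stf_type, but is driven by the ring frame variable self._new_shell_ring_frame_type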
self._ent_shell_ring_stf_type = ttk.OptionMenu(self._tab_prop, self._new_shell_ring_frame_type,'T',
*['T', 'FB', 'L', 'L-bulb'])
self._chk_shell_ring_frame_exclude = ttk.Checkbutton(self._tab_prop,
variable = self._new_shell_exclude_ring_frame,
command = self.calculation_domain_selected)
self._btn_shell_stf_section_ring_frame = ttk.Button(self._tab_prop, text='STF',command= lambda id= "ring frame": self.on_open_structure_window(id))
self._shell_ring_frame_gui_items = [self._lab_shell_ring_stiffener, self._ent_shell_ring_frame_hw,
self._ent_shell_ring_frame_tw, self._ent_shell_ring_frame_b,
self._ent_shell_ring_frame_tf, self._ent_shell_ring_frame_tripping_brackets,
self._ent_shell_ring_frame_l_between_girders,
self._ent_shell_ring_stf_type, self._chk_shell_ring_frame_exclude,
self._btn_shell_stf_section_ring_frame]
'''
Shell/panel load data
'''
self._lab_shell_loads = ttk.Label(self._tab_prop, text='Load data. Compression pressures,\n'
'stresses and forces are negative.',
)
self._new_shell_stress_or_force = tk.IntVar()
self._new_shell_stress_or_force.set(1)
self._ent_shell_force_input = ttk.Radiobutton(self._tab_prop, text="Force input",
variable=self._new_shell_stress_or_force, value=1,
command = self.calculation_domain_selected)
self._ent_shell_stress_input = ttk.Radiobutton(self._tab_prop, text="Stress input",
variable=self._new_shell_stress_or_force, value=2,
command = self.calculation_domain_selected)
self._new_shell_Nsd = tk.DoubleVar()
self._new_shell_Msd = tk.DoubleVar()
self._new_shell_Tsd = tk.DoubleVar()
self._new_shell_Qsd = tk.DoubleVar()
self._new_shell_psd = tk.DoubleVar()
self._new_shell_Nsd.set(500000)
self._new_shell_Msd.set(500000)
self._new_shell_Tsd.set(40000)
self._new_shell_Qsd.set(1500)
self._new_shell_psd.set(-0.2)
self._new_shell_uls_or_als = tk.StringVar()
self._new_shell_end_cap_pressure_included = tk.StringVar()
self._new_shell_fab_ring_stf = tk.StringVar()
self._new_shell_fab_ring_frame = tk.StringVar()
self._new_shell_uls_or_als.set('ULS')
self._new_shell_end_cap_pressure_included.set('not included in axial force')
self._new_shell_fab_ring_stf.set('Fabricated')
self._new_shell_fab_ring_frame.set('Cold formed')
self._lab_shell_limit_state = ttk.Label(self._tab_prop, text='Limit state:', font=self._text_size['Text 9 bold'],
)
self._lab_shell_en_cap_pressure = ttk.Label(self._tab_prop, text='End cap pressure is', font=self._text_size['Text 8'],
)
self._lab_shell_fab_stf = ttk.Label(self._tab_prop, text='Fabrication method ring stiffener:', font=self._text_size['Text 8'],
)
self._lab_shell_fab_frame = ttk.Label(self._tab_prop, text='Fabrication method ring girder:', font=self._text_size['Text 8'],
)
self._new_shell_sasd = tk.DoubleVar()
self._new_shell_smsd = tk.DoubleVar()
self._new_shell_tTsd = tk.DoubleVar()
self._new_shell_tQsd = tk.DoubleVar()
self._new_shell_shsd = tk.DoubleVar()
self._ent_shell_uls_or_als = ttk.OptionMenu(self._tab_prop, self._new_shell_uls_or_als,'ULS', *['ULS', 'ALS'])
self._ent_shell_end_cap_pressure_included = ttk.OptionMenu(self._tab_prop,
self._new_shell_end_cap_pressure_included,
'included in axial force',
*['not included in axial force',
'included in axial force'])
self._ent_shell_fab_ring_stf = ttk.OptionMenu(self._tab_prop, self._new_shell_fab_ring_stf,'Fabricated',
*['Fabricated', 'Cold formed'])
self._ent_shell_fab_ring_frame = ttk.OptionMenu(self._tab_prop, self._new_shell_fab_ring_frame,'Fabricated',
*['Fabricated', 'Cold formed'])
self._ent_shell_yield = ttk.Entry(self._tab_prop, textvariable=self._new_shell_yield,
)
self._ent_shell_Nsd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_Nsd,
width=int(5 * 1), )
self._ent_shell_Msd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_Msd,
width=int(5 * 1), )
self._ent_shell_Tsd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_Tsd,
width=int(5 * 1), )
self._ent_shell_Qsd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_Qsd,
width=int(5 * 1), )
self._ent_shell_psd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_psd,
width=int(5 * 1), )
self._ent_shell_sasd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_sasd,
width=int(5 * 1), )
self._ent_shell_smsd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_smsd,
width=int(5 * 1), )
self._ent_shell_tTsd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_tTsd,
width=int(5 * 1), )
self._ent_shell_tQsd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_tQsd,
width=int(5 * 1), )
self._ent_shell_shsd = ttk.Entry(self._tab_prop, textvariable=self._new_shell_shsd,
width=int(5 * 1), )
# Load information button
self._shell_btn_load_info = ttk.Button(self._tab_prop, text='Load info',
command=lambda id= "shell": self.stress_information_notebooks(id),
style = "Bold.TButton")
self._flat_btn_load_info = ttk.Button(self._tab_prop, text='Load info',
command=lambda id= "flat": self.stress_information_notebooks(id),
style = "Bold.TButton")
self._shell_btn_length_info = ttk.Button(self._tab_prop, text='Length info',
command=lambda id= "length": self.stress_information_notebooks(id),
style = "Bold.TButton")
self._shell_loads_other_gui_items = [self._lab_shell_loads, self._ent_shell_force_input,
self._ent_shell_stress_input]
self._shell_loads_forces_gui_items = [self._ent_shell_Nsd, self._ent_shell_Msd,
self._ent_shell_Tsd, self._ent_shell_Qsd, self._ent_shell_psd]
self._shell_loads_stress_gui_items = [self._ent_shell_sasd, self._ent_shell_smsd,self._ent_shell_tTsd,
self._ent_shell_tQsd, self._ent_shell_psd,self._ent_shell_shsd]
self._shell_other_gui_items = [self._ent_shell_end_cap_pressure_included, self._ent_shell_uls_or_als,
self._ent_shell_fab_ring_stf, self._ent_shell_fab_ring_frame,
self._lab_shell_limit_state,
self._lab_shell_en_cap_pressure,self._lab_shell_fab_stf,
self._lab_shell_fab_frame,self._ent_shell_yield,self._lab_yield]
self._shell_exclude_ring_stf = tk.Frame(self._tab_prop, height=10, bg="black", colormap="new", )
self._shell_exclude_ring_frame = tk.Frame(self._tab_prop, height=10, bg="black", colormap="new")
''' END shell input '''
prop_vert_start = 0.01
types_start = 0.005208333
options = list(CylinderAndCurvedPlate.geomeries.values()) # Shell geometry selection [string]
self._shell_geometries_map = CylinderAndCurvedPlate.geomeries_map # Shell geometry selection string : int
self._current_calculation_domain = 'Flat plate, stiffened'
self._unit_informations_dimensions = list()
self._ent_calculation_domain = ttk.OptionMenu(self._tab_prop, self._new_calculation_domain,options[0], *options,
command=self.calculation_domain_selected)
ttk.Label(self._tab_prop, text='Structural and calculation properties input below:',
font=self._text_size['Text 9 bold'],
).place(rely=prop_vert_start-delta_y*2.1,relx=types_start,
anchor = tk.NW)
ttk.Label(self._tab_prop, text='Select calculation domain ->',
font=self._text_size['Text 10 bold'],
).place(rely=prop_vert_start, relx=types_start,
anchor=tk.NW)
self._ent_calculation_domain.place(rely=prop_vert_start, relx=types_start + delta_x*5)
# --- Compartment/tank load input and information ---
load_vert_start = 0.05#frame_horizontal -0.03
ttk.Label(self._tab_comp,text = 'Selected compartment from box below:', )\
.place(relx=types_start, rely=load_vert_start + 8*delta_y)
self._selected_tank = ttk.Label(self._tab_comp,text='', font = 'Verdana 20 bold')
self._selected_tank.place(relx=0.3, rely=load_vert_start + 10*delta_y)
self._compartments_listbox = tk.Listbox(self._tab_comp, height = int(10 * 1),
width = int(5 * 1),
font=self._text_size["Text 10 bold"]
,
selectmode = 'extended' )
self._compartments_listbox.place(relx=types_start, rely=load_vert_start + 10*delta_y)
self._compartments_listbox.bind('<<ListboxSelect>>', self.button_1_click_comp_box)
ttk.Button(self._tab_comp, text="Set compartment\n""properties.",command = self.update_tank,
style = "Bold.TButton")\
.place(relx=types_start + delta_x*4, rely=load_vert_start + delta_y * 10, relwidth = 0.3)
ttk.Button(self._tab_comp, text="Delete all tanks", command=self.delete_all_tanks,
style = "Bold.TButton").place(relx=types_start + delta_x*4, rely=load_vert_start + delta_y * 12,
relwidth = 0.3)
self._ent_content_type = ttk.OptionMenu(self._tab_comp, self._new_content_type, list(self._tank_options.keys())[0],*list(self._tank_options.keys()),
command=self.tank_density_trace)
ent_width = 10
self._ent_overpressure = ttk.Entry(self._tab_comp, textvariable = self._new_overpresure,
width = int(ent_width * 1),
)
self._ent_density = ttk.Entry(self._tab_comp, textvariable = self._new_density,
width = int(ent_width * 1),
)
self._ent_max_el = ttk.Entry(self._tab_comp, textvariable=self._new_max_el,
width=int(ent_width * 1),
)
self._ent_min_el = ttk.Entry(self._tab_comp, textvariable=self._new_min_el,
width=int(ent_width * 1),
)
comp_dx = delta_x
comp_dy = delta_y
comp_ent_x = ent_x
comp_ent_y = 0.4
ttk.Label(self._tab_comp, text = '', )\
.place(relx=0.052083333, rely=comp_ent_y + 3.4*comp_dy)
ttk.Label(self._tab_comp, text='Tank content :', font = self._text_size['Text 8'], )\
.place(relx=hor_start, rely=comp_ent_y + comp_dy * 4.5)
self._ent_content_type.place(relx= comp_ent_x+0.35*comp_dx, rely=comp_ent_y + comp_dy * 4.5)
ttk.Label(self._tab_comp, text='Tank density [kg/m^3]:', font = self._text_size['Text 8'], )\
.place(relx=hor_start, rely=comp_ent_y + comp_dy * 6)
self._ent_density.place(relx=comp_ent_x+0.4*comp_dx, rely=comp_ent_y + comp_dy * 6)
ttk.Label(self._tab_comp, text='Overpressure [Pa]:', font = self._text_size['Text 8'], )\
.place(relx=hor_start, rely=comp_ent_y + comp_dy * 7)
self._ent_overpressure.place(relx=comp_ent_x+0.4*comp_dx, rely=comp_ent_y + comp_dy * 7)
ttk.Label(self._tab_comp, text='Max elevation [m]:', font = self._text_size['Text 8'], )\
.place(relx=hor_start, rely=comp_ent_y + comp_dy * 8)
self._ent_max_el.place(relx=comp_ent_x+0.4*comp_dx, rely=comp_ent_y + comp_dy * 8)
ttk.Label(self._tab_comp, text='Min elevation [m]:', font = self._text_size['Text 8'], )\
.place(relx=hor_start, rely=comp_ent_y + comp_dy * 9)
self._ent_min_el.place(relx=comp_ent_x+0.4*comp_dx, rely=comp_ent_y + comp_dy * 9)
self._tank_acc_label = ttk.Label(self._tab_comp, text = 'Acceleration [m/s^2]: ',
font = self._text_size['Text 8'], )
self._tank_acc_label.place(relx=hor_start, rely=comp_ent_y + comp_dy * 10)
# --- button to create compartments and define external pressures ---
try:
img_file_name = 'img_int_pressure_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._int_button = tk.Button(self._tab_comp,image = photo,command=self.grid_find_tanks, bg = 'white')
self._int_button.image = photo
self._int_button.place(relx=types_start +delta_x, rely=load_vert_start + delta_y*3,
relheight = 0.07, relwidth = 0.6)
except TclError:
tk.Button(self._tab_comp, text='New tanks - start search \n'
'to find compartments', command=self.grid_find_tanks,
bg = self._button_bg_color, fg = self._button_fg_color, ) \
.place(relx=types_start, rely=load_vert_start + 1.55 * delta_y,
relheight=0.044, relwidth=0.3)
show_compartment = ttk.Button(self._tab_comp, text='Display current\n compartments',
command=self.grid_display_tanks,
style = "Bold.TButton")
show_compartment.place(relx=types_start + delta_x*4, rely=load_vert_start + delta_y * 14, relwidth = 0.3)
try:
img_file_name = 'img_ext_pressure_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._ext_button = tk.Button(self._tab_comp,image=photo, command = self.on_show_loads,
bg = 'white')
self._ext_button.image = photo
self._ext_button.place(relx=types_start + delta_x, rely=load_vert_start,
relheight = 0.07, relwidth = 0.6)
except TclError:
tk.Button(self._tab_comp, text='New external load window \nsea - static/dynamic',
command=self.on_show_loads
)\
.place(relx=ent_x+delta_x*1.5, rely=load_vert_start+1.55*delta_y,
relheight = 0.044, relwidth = 0.11)
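# Relative placement constants for the load combination and acceleration area of the main frame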
lc_x, lc_x_delta, lc_y, lc_y_delta = 0.786458333, 0.015625, 0.12037037, 0.023148148
# --- information on accelerations ----
ttk.Label(self._main_fr,text='Static and dynamic accelerations',
)\
.place(relx=lc_x, rely=lc_y - 5 * lc_y_delta)
ttk.Label(self._main_fr,text='Static acceleration [m/s^2]: ',
)\
.place(relx=lc_x, rely=lc_y - 4 * lc_y_delta)
ttk.Label(self._main_fr,text='Dyn. acc. loaded [m/s^2]:',
)\
.place(relx=lc_x, rely=lc_y - 3 * lc_y_delta)
ttk.Label(self._main_fr,text='Dyn. acc. ballast [m/s^2]:',
)\
.place(relx=lc_x, rely=lc_y - 2 * lc_y_delta)
self._new_dyn_acc_loaded = tk.DoubleVar()
self._new_dyn_acc_ballast = tk.DoubleVar()
self._new_static_acc = tk.DoubleVar()
self._new_static_acc.set(9.81)
self._new_dyn_acc_loaded.set(0)
self._new_dyn_acc_ballast.set(0)
shift_x_acc = 0.08
ttk.Entry(self._main_fr, textvariable = self._new_static_acc,width = 10,
)\
.place(relx=lc_x+shift_x_acc, rely=lc_y - 4 * lc_y_delta)
ttk.Entry(self._main_fr, textvariable = self._new_dyn_acc_loaded,width = 10,
)\
.place(relx=lc_x+shift_x_acc , rely=lc_y - 3 * lc_y_delta)
ttk.Entry(self._main_fr, textvariable = self._new_dyn_acc_ballast,width = 10,
)\
.place(relx=lc_x+shift_x_acc , rely=lc_y - 2 * lc_y_delta)
ttk.Button(self._main_fr, text = 'Set\naccelerations', command = self.create_accelerations,
style = "Bold.TButton")\
.place(relx=lc_x +shift_x_acc*1.5, rely=lc_y - 4 * lc_y_delta)
# --- checkbuttons and labels ---
self._dnv_a_chk,self._dnv_b_chk = tk.IntVar(),tk.IntVar()
self._tank_test_chk,self._manual_chk = tk.IntVar(),tk.IntVar()
self._check_button_load_comb = [self._dnv_a_chk,self._dnv_b_chk, self._tank_test_chk, self._manual_chk]
self._active_label = ttk.Label(self._main_fr, text = '',
)
self._active_label.place(relx=lc_x+lc_x_delta*10,rely=lc_y-lc_y_delta*5)
ttk.Label(self._main_fr, text='Combination for line (select line). Change with slider: ',
)\
.place(relx=lc_x, rely=lc_y + 2.5*delta_y)
lc_y += 0.148148148
self._combination_slider = ttk.Scale(self._main_fr, from_=1, to=4, command=self.gui_load_combinations,length=400,
orient = 'horizontal')
ttk.Label(self._main_fr, text='1: DNV a) 2: DNV b) 3: TankTest '
' 4: Cylinder')\
.place(relx=lc_x +0*lc_x_delta, rely=lc_y - 2*lc_y_delta)
self._combination_slider.place(relx=lc_x +0*lc_x_delta, rely=lc_y - 3*lc_y_delta)
self._combination_slider_map = {1:'dnva',2:'dnvb',3:'tanktest', 4: 'Cylinder'}
ttk.Label(self._main_fr, text='Name:', )\
.place(relx=lc_x + 0 * lc_x_delta, rely=lc_y)
ttk.Label(self._main_fr, text='Stat LF', )\
.place(relx=lc_x + 8.5 * lc_x_delta, rely=lc_y)
ttk.Label(self._main_fr, text='Dyn LF', )\
.place(relx=lc_x + 10.2 * lc_x_delta, rely=lc_y)
ttk.Label(self._main_fr, text='Include?',font = self._text_size['Text 7'], )\
.place(relx=lc_x + 11.8 * lc_x_delta, rely=lc_y)
self._result_label_dnva = ttk.Label(self._main_fr, text='DNV a [Pa]: ',font='Text 8', )
self._result_label_dnvb = ttk.Label(self._main_fr, text='DNV b [Pa]: ',font=self._text_size["Text 8"],
)
self._result_label_tanktest = ttk.Label(self._main_fr, text='Tank test [Pa]: ',font=self._text_size["Text 8"],
)
self._result_label_manual = ttk.Label(self._main_fr, text='Manual [Pa]: ',font=self._text_size["Text 8"],
)
self.results_gui_start = 0.6
self._lab_pressure = ttk.Label(self._main_fr, text = 'Pressures for this line: \n(DNV a/b [loaded/ballast], tank test, manual)\n'
'Note that ch. 4.3.7 and 4.3.8 are accounted for.',font=self._text_size["Text 10"],
)
self._lab_pressure.place(relx= 0.786458333, rely= self.results_gui_start)
# --- optimize button ---
ttk.Label(self._main_fr,text='Optimize selected line/structure (right click line):',
font = self._text_size['Text 9 bold'], )\
.place(relx=lc_x, rely=lc_y - 7 * lc_y_delta)
try:
img_file_name = 'img_optimize.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._opt_button = tk.Button(self._main_fr,image=photo, command = self.on_optimize,
bg = 'white', fg = self._button_fg_color)
self._opt_button.image = photo
self._opt_button.place(relx=lc_x, rely=lc_y - 6 * lc_y_delta, relheight = 0.04, relwidth = 0.098)
except TclError:
self._opt_button =tk.Button(self._main_fr, text='Optimize', command=self.on_optimize,
bg = self._button_bg_color, fg = self._button_fg_color)
self._opt_button.place(relx=lc_x, rely=lc_y - 6 * lc_y_delta, relheight = 0.04, relwidth = 0.098)
try:
img_file_name = 'img_multi_opt.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._opt_button_mult = tk.Button(self._main_fr,image=photo, command = self.on_optimize_multiple,
bg = self._button_bg_color, fg = self._button_fg_color)
self._opt_button_mult.image = photo
self._opt_button_mult.place(relx=lc_x+0.1, rely=lc_y - 6 * lc_y_delta, relheight = 0.04, relwidth = 0.065)
except TclError:
self._opt_button_mult= tk.Button(self._main_fr, text='MultiOpt', command=self.on_optimize_multiple,
bg = self._button_bg_color, fg = self._button_fg_color)
self._opt_button_mult.place(relx=lc_x+0.1, rely=lc_y - 6 * lc_y_delta, relheight = 0.04, relwidth = 0.065)
try:
img_file_name = 'cylinder_opt.png'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._opt_cylinder = tk.Button(self._main_fr,image=photo, command = self.on_optimize_cylinder,
bg = 'white', fg = 'white')
self._opt_cylinder.image = photo
except TclError:
self._opt_cylinder = tk.Button(self._main_fr, text='Cylinder optimization',
command=self.on_optimize_cylinder,
bg = self._button_bg_color, fg = self._button_fg_color)
self._opt_button_span = ttk.Button(self._main_fr, text='SPAN', command=self.on_geometry_optimize,
style = "Bold.TButton")
self._opt_button_span.place(relx=lc_x + 0.167,rely=lc_y - 6 * lc_y_delta, relheight = 0.04,
relwidth = 0.04)
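# Map each calculation domain to its optimize buttons and their place() geometry [relx, rely, relheight, relwidth]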
self._optimization_buttons = {'Flat plate, stiffened': [self._opt_button, self._opt_button_mult,
self._opt_button_span],
'Flat plate, stiffened place': [[lc_x, lc_y - 6 * lc_y_delta, 0.04, 0.098],
[lc_x+0.1, lc_y - 6 * lc_y_delta, 0.04, 0.065],
[lc_x + 0.167, lc_y - 6 * lc_y_delta, 0.04, 0.04]],
'Flat plate, unstiffened': [],
'Flat plate, unstiffened place': [],
'Flat plate, stiffened with girder': [],
'Flat plate, stiffened with girder place': [],
'cylinder': [self._opt_cylinder],
'cylinder place' : [[lc_x, lc_y - 6 * lc_y_delta, 0.04, 0.175]]}
# Load information button
ttk.Button(self._main_fr, text='Load info', command=self.button_load_info_click,style = "Bold.TButton")\
.place(relx=0.78,rely=0.7, relwidth = 0.04)
# Load information button
ttk.Button(self._main_fr, text='Load factors', command=self.on_open_load_factor_window,style = "Bold.TButton")\
.place(relx=0.8225,rely=0.7, relwidth = 0.05)
# PULS result information
self._puls_information_button = ttk.Button(self._main_fr, text='PULS results for line',
command=self.on_puls_results_for_line,style = "Bold.TButton")
self._puls_information_button.place(relx=0.875,rely=0.7, relwidth = 0.075)
# Weight development plot
self._weight_button = ttk.Button(self._main_fr, text='Weights',
command=self.on_plot_cog_dev,style = "Bold.TButton")
self._weight_button.place(relx=0.9525,rely=0.7, relwidth = 0.038)
self.gui_structural_properties() # Initiating the flat panel structural properties
self.set_colors('default') # Setting colors theme
# self._current_theme = 'default'
def set_colors(self, theme):
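'''Apply a GUI theme: the color themes re-style the ttk widgets and canvases, while the
'modelling', 'all items' and 'cylinder' options only rearrange the main canvas layout.'''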
self._current_theme = theme
if theme == 'light':
self._general_color = 'alice blue'
self._color_text = 'black'
ent_bg = '#FFFFFF'
elif theme == 'grey':
self._general_color = 'light grey'
self._color_text = 'black'
ent_bg = '#FFFFFF'
elif theme == 'dark':
self._general_color = '#2B2B2B'
self._color_text = 'light grey'
ent_bg = '#FFFFFF'
elif theme == 'default':
self._general_color = '#F0F0F0'
self._color_text = 'black'
ent_bg = '#FFFFFF'
elif theme == 'pink':
self._general_color = '#FFD3F6'
self._color_text = 'black'
ent_bg = 'white'
#relx=x_canvas_place, rely=0,relwidth=0.523, relheight = 0.73
elif theme == 'SlavaUkraini':
self._general_color = '#0057b7'
self._color_text = 'white'
ent_bg = 'white'
canvas_bg = '#ffd700'
elif theme == 'modelling':
self._main_canvas.place_forget()
x_canvas_place = 0.26
self._main_canvas.place(relx=x_canvas_place, rely=0,relwidth=0.74, relheight = 0.99)
tk.Misc.lift(self._main_canvas)
self._gui_functional_look = 'modelling'
elif theme == 'all items':
self._gui_functional_look = 'all items'
self._main_canvas.place_forget()
x_canvas_place = 0.26
self._main_canvas.place(relx=x_canvas_place, rely=0, relwidth=0.523, relheight=0.73)
elif theme == 'cylinder':
self._main_canvas.place_forget()
x_canvas_place = 0.26
self._main_canvas.place(relx=x_canvas_place, rely=0,relwidth=0.74, relheight = 0.73)
tk.Misc.lift(self._main_canvas)
self._gui_functional_look = 'cylinder'
placement = self._gui_functional_look_cylinder_opt # [0.786458333, 0.12962963000000005, 0.04, 0.175]
self._opt_cylinder.place(relx=placement[0], rely=placement[1], relheight=placement[2], relwidth=placement[3])
tk.Misc.lift(self._opt_cylinder)
if theme not in ['modelling', 'all items','cylinder']:
self._style.configure("Bold.TButton", font=('Sans', '10', 'bold'))
self._style.configure('TCheckbutton', background=self._general_color)
self._style.configure('TFrame', background=self._general_color)
self._style.configure('TLabel', background=self._general_color, foreground = self._color_text)
self._style.configure('TScale', background=self._general_color)
self._style.configure('TEntry', background=ent_bg)
self._style.configure('TOptionMenu', background=ent_bg)
self._style.configure("TMenubutton", background=ent_bg)
self._style.configure('TRadiobutton', background=self._general_color, foreground='black')
if theme in ['SlavaUkraini',]:
self._prop_canvas.configure(bg=canvas_bg)
self._main_canvas.configure(bg=canvas_bg)
self._result_canvas.configure(bg=canvas_bg)
else:
self._prop_canvas.configure(bg = self._general_color)
self._main_canvas.configure(bg = self._general_color)
self._result_canvas.configure(bg = self._general_color)
# self._frame_viz_hor.configure(bg =self._color_text)
# self._frame_viz_ver.configure(bg=self._color_text)
self.update_frame()
def gui_structural_properties(self, flat_panel_stf_girder = False, flat_unstf = False, flat_stf = True,
shell = False, long_stf = False, ring_stf = False,
ring_frame = False, force_input = False, stress_input = False):
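'''Place the property-tab widgets for the selected calculation domain: flat plate variants
(unstiffened, stiffened, stiffened with girder) or shell/cylinder variants (shell geometry,
longitudinal/ring stiffeners, ring frame) with force or stress load input.'''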
vert_start = 0.04
hor_start = 0.02
delta_y = 0.024
delta_x = 0.13
ent_relx = hor_start + 6*delta_x
geo_ent_width = 0.1
ent_geo_y = 0.1
opt_width = 0.2
self._unit_informations_dimensions = list()
if any([flat_unstf, flat_stf, flat_panel_stf_girder]):
'''
self._flat_gui_headlines = [ttk.Label(self._tab_prop, text='Plate input'),
ttk.Label(self._tab_prop, text='Stiffener'),
ttk.Label(self._tab_prop, text='Girder'),
ttk.Label(self._tab_prop, text='Load/stresses input'),
ttk.Label(self._tab_prop, text='Special provisions input'),
ttk.Label(self._tab_prop, text='Buckling input')]
'''
# Top buttons
top_button_shift = 0.2
self._stf_button.place(relx=hor_start, rely=vert_start+ top_button_shift * delta_y)
self._stress_button.place(relx=hor_start + delta_x*1.5, rely=vert_start+ top_button_shift * delta_y)
self._fls_button.place(relx=hor_start + delta_x*3, rely=vert_start+ top_button_shift * delta_y)
self.add_stucture.place(relx=hor_start + delta_x*4.5, rely=vert_start+ top_button_shift * delta_y,
relheight = 0.065, relwidth = 0.39)
# Input fields
if any([shell, long_stf, ring_stf, ring_frame, force_input, stress_input]):
return
self._flat_gui_headlines[0].place(relx=hor_start, rely=vert_start + 3 * delta_y)
idx = 4
for pl_lab, pl_ent in zip(self._flat_gui_lab_plate, self._flat_gui_plate):
pl_lab.place(relx=hor_start, rely=vert_start + idx * delta_y)
pl_ent.place(relx=hor_start + 3*delta_x, rely=vert_start + idx * delta_y)
idx += 1
for stf_lab, stf_ent, girder_ent in zip(self._flat_gui_lab_stf, self._flat_gui_stf, self._flat_gui_girder):
if flat_panel_stf_girder:
girder_ent.place(relx=hor_start + 5 * delta_x, rely=vert_start + idx * delta_y)
if flat_stf:
stf_lab.place(relx=hor_start, rely=vert_start + idx * delta_y)
stf_ent.place(relx=hor_start + 3 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
self._flat_gui_headlines[3].place(relx=hor_start + 0 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
this_count = 1
for load_lab, load_ent in zip(self._flat_gui_lab_loads, self._flat_gui_loads):
load_lab.place(relx=hor_start, rely=vert_start + idx * delta_y)
load_ent.place(relx=hor_start + 3*delta_x, rely=vert_start + idx * delta_y)
idx += 1
this_count += 1
idx_now = idx
idx -= this_count
self._flat_gui_headlines[4].place(relx=hor_start + 5 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
for prov_lab, prov_ent in zip(self._flat_gui_lab_os_c101_provisions, self._flat_gui_os_c101_provisions):
prov_lab.place(relx=hor_start + 5 * delta_x, rely=vert_start + idx * delta_y)
prov_ent.place(relx=hor_start + 6.5 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
self._flat_btn_load_info.place(relx=hor_start + 5 * delta_x,
rely=vert_start + (idx+1) * delta_y)
self._button_str_type.place(relx=hor_start + 5 * delta_x,
rely=vert_start + (idx+3) * delta_y)
idx = idx_now
self._flat_gui_headlines[5].place(relx=hor_start + 0 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
self._lab_buckling_method.place(relx=hor_start + 0 * delta_x, rely=vert_start + idx * delta_y)
self._buckling_method.place(relx=hor_start + 4 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
if flat_panel_stf_girder:
self._flat_gui_headlines[7].place(relx=hor_start + 6 * delta_x, rely=vert_start + idx * delta_y)
if flat_stf:
self._flat_gui_headlines[6].place(relx=hor_start + 4*delta_x, rely=vert_start + idx * delta_y)
idx += 1
for buckling_lab, buckling_stf_ent, buckling_girder_ent in zip(self._flat_gui_buc_lab_stf_girder,
self._flat_gui_buc_stf_opt,
self._flat_gui_buc_girder_opt):
if flat_panel_stf_girder:
buckling_girder_ent.place(relx=hor_start + 6 * delta_x, rely=vert_start + idx * delta_y)
if flat_stf:
buckling_lab.place(relx=hor_start, rely=vert_start + idx * delta_y)
buckling_stf_ent.place(relx=hor_start + 4 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
if flat_panel_stf_girder:
self._flat_gui_girder_moment_factor[0].place(relx=hor_start + 0 * delta_x, rely=vert_start + idx * delta_y)
self._flat_gui_girder_moment_factor[1].place(relx=hor_start + 6 * delta_x, rely=vert_start + idx * delta_y,
relwidth = 0.08)
self._flat_gui_girder_moment_factor[2].place(relx=hor_start + 7 * delta_x, rely=vert_start + idx * delta_y,
relwidth = 0.08)
idx += 1
for buckling_lab, buckling_ent in zip(self._flat_gui_buc_lab_common, self._flat_gui_buc_common_opt):
buckling_lab.place(relx=hor_start, rely=vert_start + idx * delta_y)
buckling_ent.place(relx=hor_start + 5 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
for buckling_lab, buckling_ent in zip(self._flat_gui_lab_buckling, self._flat_gui_buckling):
buckling_lab.place(relx=hor_start, rely=vert_start + idx * delta_y)
buckling_ent.place(relx=hor_start + 5 * delta_x, rely=vert_start + idx * delta_y)
idx += 1
self._puls_run_all.place(relx=hor_start + 6 * delta_x, rely=vert_start + (idx-2) * delta_y)
# optimize buttons
for dom in ['Flat plate, unstiffened', 'Flat plate, stiffened', 'Flat plate, stiffened with girder',
'cylinder']:
for btn, placement in zip(self._optimization_buttons[dom],
self._optimization_buttons[dom + ' place']):
btn.place_forget()
for btn, placement in zip(self._optimization_buttons[self._new_calculation_domain.get()],
self._optimization_buttons[self._new_calculation_domain.get() + ' place']):
btn.place(relx=placement[0], rely=placement[1], relheight=placement[2], relwidth=placement[3])
if shell:
'''
self._shell_gui_items = [self._lab_shell, self._ent_shell_plate_thk, self._ent_shell_radius,
self._ent_shell_dist_rings,
self._ent_shell_length,self._ent_shell_tot_length,self._ent_shell_k_factor]
'''
self._lab_shell.place(relx=hor_start, rely=ent_geo_y+ delta_y)
tmp_unit_info = list()
for lab in ['Shell plate thickness', 'Shell radius (middle of plate)', 'Distance between rings, l',
'Length of shell, L', 'Total cylinder length, Lc', 'Effective buckling length factor, k',
'Material factor']:
tmp_unit_info.append(ttk.Label(self._tab_prop, text=lab))
for lab, idx in zip(tmp_unit_info, range(len(tmp_unit_info))):
lab.place(relx=hor_start,rely=ent_geo_y+ delta_y*(2+idx))
self._unit_informations_dimensions.append(lab)
for idx, entry in enumerate(self._shell_gui_items[1:]):
entry.place(relx=hor_start + 5*delta_x, rely=ent_geo_y + delta_y*(2+idx), relwidth=geo_ent_width)
self._shell_btn_length_info.place(relx=hor_start + 6*delta_x, rely=ent_geo_y + delta_y*(idx))
ent_geo_y += delta_y*(len(self._shell_gui_items[1:])+1)
if long_stf:
self._lab_shell_long_stiffener.place(relx=hor_start, rely=ent_geo_y+ delta_y)
tmp_unit_info = list()
for lab in ['Web, hw', 'Web, tw', 'Flange, b', 'Flange, tf', 'Spacing, s', 'Stf. type', 'Load section']:
tmp_unit_info.append(ttk.Label(self._tab_prop, text=lab))
for lab, idx in zip(tmp_unit_info, range(len(tmp_unit_info))):
lab.place(relx=hor_start + idx * delta_x,rely=ent_geo_y+ delta_y*2)
self._unit_informations_dimensions.append(lab)
for idx, entry in enumerate(self._shell_long_stf_gui_items[1:]):
entry.place(relx=hor_start + idx * delta_x, rely=ent_geo_y+ delta_y*3, relwidth=geo_ent_width)
self._unit_informations_dimensions.append(self._lab_shell_long_stiffener)
ent_geo_y += delta_y*3
if ring_stf:
self._lab_shell_ring_stiffener.place(relx=hor_start, rely=ent_geo_y+ delta_y*1)
tmp_unit_info = list()
for lab in ['Web, hw', 'Web, tw', 'Flange, b', 'Flange, tf','tr. br. dist', 'Stf. type',
'Exclude', 'Load section prop.']:
tmp_unit_info.append(ttk.Label(self._tab_prop, text=lab))
for lab, idx in zip(tmp_unit_info, range(len(tmp_unit_info))):
if idx in [6,7]:
lab.place(relx=hor_start + (idx-6) * delta_x*3, rely=ent_geo_y + delta_y * 4)
else:
lab.place(relx=hor_start + idx * delta_x,rely=ent_geo_y+ delta_y*2)
self._unit_informations_dimensions.append(lab)
self._unit_informations_dimensions.append(self._lab_shell_ring_stiffener)
for idx, entry in enumerate(self._shell_ring_stf_gui_items[1:]):
if idx in [6,7]:
entry.place(relx=hor_start + (idx-6) * delta_x*4 + delta_x, rely=ent_geo_y+ delta_y*4, relwidth=geo_ent_width)
else:
entry.place(relx=hor_start + idx * delta_x, rely=ent_geo_y+ delta_y*3, relwidth=geo_ent_width)
if self._new_shell_exclude_ring_stf.get():
self._shell_exclude_ring_stf.place(relx=0.005, rely=ent_geo_y+ delta_y*3.15, relwidth=0.9)
self._unit_informations_dimensions.append(self._shell_exclude_ring_stf)
ent_geo_y += delta_y*4
if ring_frame:
self._lab_shell_ring_frame.place(relx=hor_start, rely=ent_geo_y+ delta_y*1)
for idx, entry in enumerate(self._shell_ring_frame_gui_items[1:]):
if idx in [7, 8]:
entry.place(relx=hor_start + (idx - 7) * delta_x * 4 + delta_x, rely=ent_geo_y + delta_y * 4,
relwidth=geo_ent_width)
else:
entry.place(relx=hor_start + idx * delta_x, rely=ent_geo_y + delta_y * 3, relwidth=geo_ent_width)
tmp_unit_info = list()
for lab in ['Web, hw', 'Web, tw', 'Flange, b', 'Flange, tf', 'tr. br. dist', 'L bet. Gird.',
'Stf. type', 'Exclude', 'Load section prop.']:
tmp_unit_info.append(ttk.Label(self._tab_prop, text=lab))
for lab, idx in zip(tmp_unit_info, range(len(tmp_unit_info))):
if idx in [7,8]:
lab.place(relx=hor_start + (idx-7) * delta_x*3, rely=ent_geo_y + delta_y * 4)
else:
lab.place(relx=hor_start + idx * delta_x,rely=ent_geo_y+ delta_y*2)
self._unit_informations_dimensions.append(lab)
self._unit_informations_dimensions.append(self._lab_shell_ring_frame)
if self._new_shell_exclude_ring_frame.get():
self._shell_exclude_ring_frame.place(relx=0.005, rely=ent_geo_y+ delta_y*3.15, relwidth=0.9)
self._unit_informations_dimensions.append(self._shell_exclude_ring_frame)
ent_geo_y += delta_y*3
if not any([flat_panel_stf_girder, flat_stf, flat_unstf]):
# Other data
'''
self._shell_other_gui_items = [self._ent_shell_end_cap_pressure_included, self._ent_shell_uls_or_als,
self._ent_shell_fab_ring_stf, self._ent_shell_fab_ring_frame]
'''
self._lab_shell_limit_state.place(relx=hor_start,
rely=ent_geo_y + delta_y*2.2)
self._ent_shell_uls_or_als.place(relx=hor_start+ 1.6 * delta_x,
rely=ent_geo_y + delta_y*2.2,
relwidth=geo_ent_width*2)
# Load data
ent_geo_y += 3.3 * delta_y
#self._lab_shell_loads.place(relx=hor_start, rely=ent_geo_y - delta_y*1.5)
self._ent_shell_stress_input.place(relx=hor_start, rely=ent_geo_y)
if 'shell' in self._new_calculation_domain.get():
self._ent_shell_force_input.place(relx=hor_start + 2 * delta_x, rely=ent_geo_y)
else:
self._new_shell_stress_or_force.set(2)
lab_force = ['Axial', 'Bending', 'Torsional','Shear', 'Lateral']
lab_force_unit = ['kN', 'kNm', 'kNm', 'kN', 'N/mm2']
lab_stress = ['Axial', 'Bending', 'Torsional', 'Shear',
'Lateral', 'Add hoop']
lab_stress_unit = ['N/mm2', 'N/mm2', 'N/mm2', 'N/mm2', 'N/mm2', 'N/mm2']
to_use = self._shell_loads_forces_gui_items if self._new_shell_stress_or_force.get() == 1 \
else self._shell_loads_stress_gui_items
lab_to_use = [lab_force, lab_force_unit] if self._new_shell_stress_or_force.get() == 1\
else [lab_stress, lab_stress_unit]
tmp_unit_info = list()
tmp_unit_info_unit = list()
[tmp_unit_info.append(ttk.Label(self._tab_prop, text=val))
for val in lab_to_use[0]]
[tmp_unit_info_unit.append(ttk.Label(self._tab_prop, text=val))
for val in lab_to_use[1]]
for idx,lab in enumerate(tmp_unit_info):
lab.place(relx=hor_start, rely=ent_geo_y + (idx+1)*delta_y)
self._unit_informations_dimensions.append(lab)
for idx, entry in enumerate(to_use):
entry.place(relx=hor_start + 1.5*delta_x,
rely=ent_geo_y + (idx+1)*delta_y, relwidth=geo_ent_width)
for idx, lab in enumerate(tmp_unit_info_unit):
lab.place(relx=hor_start + 2.5*delta_x,
rely=ent_geo_y + (idx+1)*delta_y)
self._unit_informations_dimensions.append(lab)
self._shell_btn_load_info.place(relx=hor_start + 5*delta_x,
rely=ent_geo_y + 1*delta_y)
# Various
end_y = ent_geo_y + (idx+1)*delta_y
other_count = 1
self._lab_yield.place(relx=hor_start,
rely=end_y + delta_y*other_count)
self._ent_shell_yield.place(relx=hor_start+ 4 * delta_x,
rely=end_y + delta_y*other_count, relwidth=geo_ent_width)
other_count += 1
if ring_stf:
self._lab_shell_fab_stf.place(relx=hor_start,
rely=end_y + delta_y*other_count)
self._ent_shell_fab_ring_stf.place(relx = hor_start + 4 * delta_x,
rely=end_y + delta_y*other_count)
other_count += 1
if ring_frame:
self._lab_shell_fab_frame.place(relx=hor_start, rely=end_y + delta_y*other_count)
self._ent_shell_fab_ring_frame.place(relx=hor_start + 4 * delta_x,
rely=end_y + delta_y*other_count, relwidth=geo_ent_width*1.9)
other_count += 1
if self._shell_geometries_map[self._new_calculation_domain.get()] in [1,5]:
self._lab_shell_en_cap_pressure.place(relx=hor_start,
rely= end_y + delta_y*other_count)
self._ent_shell_end_cap_pressure_included.place(relx=3 * delta_x,
rely= end_y + delta_y*other_count)
other_count += 1
# Removing flat stuff
for dom in ['Flat plate, unstiffened', 'Flat plate, stiffened', 'Flat plate, stiffened with girder',
'cylinder']:
for btn, placement in zip(self._optimization_buttons[dom],
self._optimization_buttons[dom + ' place']):
btn.place_forget()
if not any([ring_stf, ring_frame]): # TODO: optimizing not implemented yet for ring stf and frame.
for btn, placement in zip(self._optimization_buttons['cylinder'],
self._optimization_buttons['cylinder' + ' place']):
btn.place(relx=placement[0], rely=placement[1], relheight=placement[2], relwidth=placement[3])
def calculation_domain_selected(self, event = None):
'''
['Stiffened panel, flat', 'Unstiffened shell (Force input)', 'Unstiffened panel (Stress input)',
'Longitudinal Stiffened shell (Force input)', 'Longitudinal Stiffened panel (Stress input)',
'Ring Stiffened shell (Force input)', 'Ring Stiffened panel (Stress input)',
'Orthogonally Stiffened shell (Force input)', 'Orthogonally Stiffened panel (Stress input)']
'''
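# Hide all currently placed property widgets before placing the ones for the selected domain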
to_process = [self._puls_run_all, self._lab_buckling_method,
self._buckling_method, self._lab_yield,
self._lab_mat_fac,self._structure_types_label, self._button_str_type, self._ent_structure_type,
self._lab_structure_type, self._lab_kpp, self._lab_kps, self._lab_km1, self._lab_km2,
self._lab_stf_type, self._lab_press_side, self._ent_pressure_side,
self._lab_puls_input, self._lab_puls_up_supp, self._lab_puls_acceptance,
self._lab_puls_uf, self._lab_puls_int_gt, self._lab_puls_cont_sniped, self._lab_span, self._lab_s,
self._ent_puls_sp_or_up, self._ent_puls_method, self._ent_puls_uf, self._ent_puls_panel_boundary,
self._ent_puls_stf_end_type,
self._stf_button, self._stress_button,self._fls_button, self._shell_btn_load_info,
self._flat_btn_load_info, self._shell_btn_length_info,self._button_str_type]
to_process = to_process+self._shell_gui_items+self._shell_long_stf_gui_items+self._shell_ring_stf_gui_items+\
self._shell_ring_frame_gui_items+self._shell_loads_other_gui_items+\
self._shell_loads_forces_gui_items+self._shell_loads_stress_gui_items+\
self._unit_informations_dimensions + self._shell_other_gui_items+ self._flat_gui_plate + \
self._flat_gui_lab_plate + self._flat_gui_lab_stf+self._flat_gui_stf + self._flat_gui_girder + \
self._flat_gui_lab_loads + self._flat_gui_loads + self._flat_gui_lab_os_c101_provisions + \
self._flat_gui_os_c101_provisions + \
self._flat_gui_lab_buckling + self._flat_gui_buckling + self._flat_gui_headlines + \
self._flat_gui_buc_lab_common+ self._flat_gui_buc_common_opt+ self._flat_gui_buc_girder_opt+\
self._flat_gui_buc_lab_stf_girder+ self._flat_gui_buc_stf_opt+ self._flat_gui_girder_moment_factor
for item in to_process:
item.place_forget()
if event is not None:
self._new_shell_exclude_ring_stf.set(False)
self._new_shell_exclude_ring_frame.set(False)
'''
geomeries = {1:'Unstiffened shell (Force input)',
2:'Unstiffened panel (Stress input)',
3:'Longitudinal Stiffened shell (Force input)',
4:'Longitudinal Stiffened panel (Stress input)',
5:'Ring Stiffened shell (Force input)',
6:'Ring Stiffened panel (Stress input)',
7:'Orthogonally Stiffened shell (Force input)',
8:'Orthogonally Stiffened panel (Stress input)'}
'''
if self._new_calculation_domain.get() == 'Flat plate, unstiffened':
self._new_puls_sp_or_up.set('UP')
self.gui_structural_properties(flat_unstf = True, flat_stf = False)
elif self._new_calculation_domain.get() == 'Flat plate, stiffened':
self._new_puls_sp_or_up.set('SP')
self.gui_structural_properties(flat_stf = True)
elif self._new_calculation_domain.get() == 'Flat plate, stiffened with girder':
self._new_puls_sp_or_up.set('SP')
self.gui_structural_properties(flat_panel_stf_girder = True, flat_stf = True)
elif self._new_calculation_domain.get() in ['Unstiffened shell (Force input)',
'Unstiffened panel (Stress input)']:
self.gui_structural_properties(flat_unstf=False, flat_stf = False, flat_panel_stf_girder = False,
shell=True, long_stf=False, ring_stf=False, ring_frame=False)
elif self._new_calculation_domain.get() in ['Longitudinal Stiffened shell (Force input)',
'Longitudinal Stiffened panel (Stress input)']:
self.gui_structural_properties(flat_unstf=False, flat_stf = False, flat_panel_stf_girder = False,
shell=True, long_stf=True, ring_stf=False, ring_frame=False)
elif self._new_calculation_domain.get() in ['Ring Stiffened shell (Force input)',
'Ring Stiffened panel (Stress input)']:
self.gui_structural_properties(flat_unstf=False, flat_stf = False, flat_panel_stf_girder = False,
shell=True, long_stf=False, ring_stf=True, ring_frame=True)
elif self._new_calculation_domain.get() in ['Orthogonally Stiffened shell (Force input)',
'Orthogonally Stiffened panel (Stress input)']:
self.gui_structural_properties(flat_unstf=False, flat_stf = False, flat_panel_stf_girder = False,
shell=True, long_stf=True, ring_stf=True, ring_frame=True)
if self._line_is_active and self._active_line in self._line_to_struc.keys():
if event == None and self._line_to_struc[self._active_line][5] is not None:
mapper ={1: 'Force', 2: 'Stress'}
load = mapper[self._new_shell_stress_or_force.get()]
struc_obj = self._line_to_struc[self._active_line][5]
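                # The helper below converts between the two input representations for the
                # current shell geometry: called with forces (stresses=None) it returns the
                # corresponding stresses, and called with stresses it returns the
                # corresponding section forces. Illustrative sketch mirroring the calls below:
                #   sasd, smsd, tTsd, tQsd, shsd = hlp.helper_cylinder_stress_to_force_to_stress(
                #       stresses=None, forces=[Nsd, Msd, Tsd, Qsd], geometry=struc_obj.geometry, ...)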
if self._new_shell_stress_or_force.get() == 1:
forces = [self._new_shell_Nsd.get(), self._new_shell_Msd.get(), \
self._new_shell_Tsd.get(), self._new_shell_Qsd.get()]
sasd, smsd, tTsd, tQsd, shsd = hlp.helper_cylinder_stress_to_force_to_stress(
stresses=None, forces=forces, geometry=struc_obj.geometry, shell_t=self._new_shell_thk.get(),
shell_radius=self._new_shell_radius.get(), shell_spacing=self._new_stf_spacing.get(),
hw=self._new_stf_web_h.get(), tw=self._new_stf_web_t.get(), b=self._new_stf_fl_w.get(),
tf=self._new_stf_fl_t.get(), CylinderAndCurvedPlate=CylinderAndCurvedPlate)
self._new_shell_sasd.set(sasd)
self._new_shell_smsd.set(smsd)
self._new_shell_tTsd.set(abs(tTsd))
self._new_shell_tQsd.set(tQsd)
# self._new_shell_shsd.set(0)
else:
stresses = [self._new_shell_sasd.get(), self._new_shell_smsd.get(), abs(self._new_shell_tTsd.get()),
self._new_shell_tQsd.get(), self._new_shell_shsd.get()]
sasd, smsd, tTsd, tQsd, shsd = stresses
Nsd, Msd, Tsd, Qsd, shsd = hlp.helper_cylinder_stress_to_force_to_stress(
stresses=stresses, geometry=struc_obj.geometry, shell_t=self._new_shell_thk.get(),
shell_radius=self._new_shell_radius.get(), shell_spacing=self._new_stf_spacing.get(),
hw=self._new_stf_web_h.get(), tw=self._new_stf_web_t.get(), b=self._new_stf_fl_w.get(),
tf=self._new_stf_fl_t.get(), CylinderAndCurvedPlate=CylinderAndCurvedPlate)
self._new_shell_Nsd.set(Nsd)
self._new_shell_Msd.set(Msd)
self._new_shell_Tsd.set(Tsd)
self._new_shell_Qsd.set(Qsd)
self._current_calculation_domain = self._new_calculation_domain.get()
        # Setting the correct optimization buttons
def puls_run_all_lines(self, line_given = None):
progress = ttk.Progressbar(self._tab_prop, mode='indeterminate')
progress.place(relx = 0.85, rely = 0.9, relwidth = 0.1)
progress.start()
if self._PULS_results is None:
self._PULS_results = PULSpanel()
if self._PULS_results.puls_sheet_location is None or not os.path.isfile(self._PULS_results.puls_sheet_location):
tk.messagebox.showerror('No PULS excel sheet located', 'Set location of PULS excel sheet.\n'
'Note that PULS excel may require 32 bit '
'office.\n\n'
                                                        'A sheet location may be set, but the file does'
                                                        ' not exist at:\n'
+ str(self._PULS_results.puls_sheet_location) +
'\n\n A file dialogue will pop up after this message.')
self._PULS_results.puls_sheet_location= \
tk.filedialog.askopenfilename(parent=self._main_fr,title='Set location of PULS excel sheet.')
if self._PULS_results.puls_sheet_location == '':
tk.messagebox.showerror('No valid PULS sheet', 'No excel sheet was provided. Cannot run PULS.\n'
'Note that PULS excel may require 32 bit office.')
return
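        # Flow: collect PULS input for every line that does not already have a result (or
        # only the given line), push it to the PULSpanel object and run the Excel-based PULS
        # calculation for all of them in one batch. Lines that already have results are
        # skipped, and a messagebox is shown if nothing needs to be run.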
dict_to_run = {}
result_lines = list(self._PULS_results.get_run_results().keys())
if line_given == None:
current_button = self._puls_run_all
for line, data in self._line_to_struc.items():
if line not in result_lines:
data[0].Plate.hw = 0 if data[0].Stiffener is None else data[0].Stiffener.hw
data[0].Plate.tw = 0 if data[0].Stiffener is None else data[0].Stiffener.tw
data[0].Plate.b = 0 if data[0].Stiffener is None else data[0].Stiffener.b
data[0].Plate.tf = 0 if data[0].Stiffener is None else data[0].Stiffener.tf
dict_to_run[line] = data[0].Plate.get_puls_input()
dict_to_run[line]['Identification'] = line
dict_to_run[line]['Pressure (fixed)'] = self.get_highest_pressure(line)['normal']/1e6
else:
current_button = self._puls_run_one
if line_given == '':
return
if line_given not in result_lines:
dict_to_run[line_given] = self._line_to_struc[line_given][0].Plate.get_puls_input()
dict_to_run[line_given]['Identification'] = line_given
dict_to_run[line_given]['Pressure (fixed)'] = self.get_highest_pressure(line_given)['normal'] / 1e6
if len(dict_to_run) > 0:
#current_button.config(relief = 'sunken')
self._PULS_results.set_all_to_run(dict_to_run)
self._PULS_results.run_all()
#current_button.config(relief='raised')
current_button.config(text='PULS run or\nupdate all lines' if line_given == None else 'PULS\nRun one line')
#current_button.config(bg=self._button_bg_color)
for key, value in self._line_to_struc.items():
value[0].need_recalc = True
else:
            tk.messagebox.showinfo('Results available', 'PULS results are already available for this line, or no '
                                                         'lines need updating.')
progress.stop()
progress.destroy()
self.update_frame()
def stress_information_notebooks(self, info_type = 'shell'):
''' Shows stress information '''
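        # info_type selects the guidance text and illustration shown: 'shell' for cylinder
        # input conventions, 'flat' for flat/stiffened panel input conventions; anything
        # else falls back to a short reference to the Help tab.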
text_m = tk.Toplevel(self._parent, background=self._general_color)
# Create the text widget
text_widget = tk.Text(text_m , height=35, width=100)
# Create a scrollbar
scroll_bar = ttk.Scrollbar(text_m)
# Pack the scroll bar
# Place it to the right side, using tk.RIGHT
scroll_bar.pack(side=tk.RIGHT)
# Pack it into our tkinter application
# Place the text widget to the left side
text_widget.pack(side=tk.LEFT)
if info_type == 'shell':
long_text = 'Information on stresses:\n' \
' \n'\
                        'Uniform stresses are assumed.\n' \
'Shear stresses are set to positive.\n' \
'Compression stress is taken as NEGATIVE.\n' \
'Lateral pressure is taken as negative when acting toward cylinder center.\n' \
'Hoop stresses are negative when applying negative overpressure.\n ' \
' \n'
elif info_type == 'flat':
long_text = 'Information on stresses:\n' \
' \n' \
                        'Uniform or linearly varying stresses are assumed.\n' \
                        'The stresses included in the check are axial membrane stresses.\n'\
                        'Shear stresses are set to positive.\n' \
                        'Bending stresses induced by the lateral pressure are included and need not be input.\n'\
                        'Compression stress is taken as POSITIVE.\n' \
                        'The membrane axial stress in the transverse direction that is due to girder bending\n' \
                        'needs to be included in the check according to method 1.\n'\
                        'Lateral pressure (outer overpressure) is taken as positive.\n' \
' \n'
else:
long_text = 'Also see the "Help tab".'
# Insert text into the text widget
text_widget.insert('current', long_text)
        photo = None
        try:
if info_type == 'shell':
img_file_name = 'Cylinder-Load_distribution.png'
elif info_type == 'flat':
img_file_name = 'img_axial_stresses.gif'
else:
img_file_name = 'Definition_of_parameters_L_and_LH.png'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._int_button.config(image = photo)
self._int_button.image = photo
except TclError:
pass
        if photo is not None:
            text_widget.image_create('current', image=photo)
def trace_puls_uf(self, *args):
if self._PULS_results is not None:
pass
def trace_material_factor(self, *args):
try:
self._new_puls_uf.set(1/self._new_material_factor.get())
except (TclError, ZeroDivisionError):
pass
def trace_puls_up_or_sp(self, event = None):
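        # Shows or hides the PULS "UP" (unstiffened panel) boundary-condition entry depending
        # on whether the panel is set to UP or SP (stiffened panel); the placement constants
        # below appear to mirror the property-tab layout.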
if self._new_puls_sp_or_up.get() == 'UP':
vert_start = 0.1
hor_start = 0.02
delta_y = 0.04
delta_x = 0.13
ent_relx = hor_start + 6 * delta_x
geo_ent_width = 0.05
ent_geo_y = vert_start
opt_width = 0.2
shift_x = delta_x * 4
lab_place = delta_y * 13
self._ent_puls_up_boundary.place(relx=hor_start + shift_x, rely=vert_start + lab_place+ 2*delta_y,
relwidth=opt_width)
else:
self._ent_puls_up_boundary.place_forget()
def puls_run_one_line(self):
self.puls_run_all_lines(self._active_line)
self.update_frame()
def puls_delete_all(self):
'''
Deletes all existing PULS results
'''
if self._PULS_results is not None:
for key, val in self._line_to_struc.items():
self._PULS_results.result_changed(key)
val[0].need_recalc = True
self.update_frame()
def resize(self, event):
self.text_scale = 1#self._main_fr.winfo_width()/1920
self._text_size = {'Text 14 bold': 'Verdana '+str(int(14*self.text_scale))+' bold',
'Text 16 bold': 'Verdana ' + str(int(16 * self.text_scale)) + ' bold',
'Text 18 bold': 'Verdana ' + str(int(18 * self.text_scale)) + ' bold',
'Text 12 bold': 'Verdana ' + str(int(12 * self.text_scale)) + ' bold',
'Text 10 bold': 'Verdana '+str(int(10*self.text_scale))+' bold',
'Text 9 bold': 'Verdana ' + str(int(9 * self.text_scale)) + ' bold',
'Text 8 bold': 'Verdana ' + str(int(8 * self.text_scale)) + ' bold',
'Text 8': 'Verdana ' + str(int(8 * self.text_scale)),
                           'Text 9': 'Verdana ' + str(int(9 * self.text_scale)),
'Text 7': 'Verdana ' + str(int(7 * self.text_scale)),
'Text 10': 'Verdana ' + str(int(10 * self.text_scale)),
'Text 7 bold': 'Verdana ' + str(int(7 * self.text_scale)) + ' bold'}
#self.update_frame()
def toggle_select_multiple(self, event = None):
if self._toggle_btn.config('relief')[-1] == 'sunken':
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg='#E1E1E1')
self._multiselect_lines = []
self._toggle_btn.config(text='Toggle select\n'
'multiple')
else:
self._toggle_btn.config(relief="sunken")
self._toggle_btn.config(bg=self._general_color)
self._toggle_btn.config(text = 'select lines')
self.update_frame()
def toggle_set_variable(self):
if self._toggle_btn.config('relief')[-1] == "raised":
tk.messagebox.showerror('Toggle select not chosen.', 'Toggle select button not pressed.\n'
'To change a parameter select a variable, \n'
'set the value you want to change and \n'
'press Change multi. param.')
return
var_to_set = self._new_toggle_var.get()
if var_to_set == '':
tk.messagebox.showerror('Select variable', 'Select a variable to change\n'
'in the drop down menu.')
return
# if not self._line_is_active:
# tk.messagebox.showerror('Select line', 'Click a line first.')
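        # obj_dict maps a parameter name to the tkinter getter of the corresponding input
        # variable; the selected value is read once and then written to every line in
        # self._multiselect_lines. Units are converted below (yield stress MPa -> Pa,
        # section dimensions mm -> m). Hedged sketch of the lookup:
        #   set_var = obj_dict['spacing']()   # e.g. 700.0 (mm from the entry field)
        #   set_var = set_var / 1000          # 0.7 m as stored on the structure objects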
obj_dict = {'mat_yield': self._new_material.get,
'mat_factor': self._new_material_factor.get,
'span': self._new_field_len.get,
'spacing': self._new_stf_spacing.get,
'plate_thk': self._new_plate_thk.get,
'stf_web_height': self._new_stf_web_h.get,
'stf_web_thk': self._new_stf_web_t.get,
'stf_flange_width': self._new_stf_fl_w.get,
'stf_flange_thk': self._new_stf_fl_t.get,
'structure_type': self._new_stucture_type.get,
'stf_type': self._new_stf_type.get,
'sigma_y1': self._new_sigma_y1.get,
'sigma_y2': self._new_sigma_y2.get,
'sigma_x1': self._new_sigma_x1.get,
'sigma_x2': self._new_sigma_x2.get,
'tau_xy': self._new_tauxy.get,
                    'plate_kpp': self._new_plate_kpp.get,
'stf_kps': self._new_stf_kps.get,
'stf_km1': self._new_stf_km1.get,
'stf_km2': self._new_stf_km2.get,
'stf_km3': self._new_stf_km3.get,
'press_side': self._new_pressure_side.get,
#'structure_types': self._structure_types,
'zstar_optimization': self._new_zstar_optimization.get,
'puls buckling method': self._new_puls_method.get,
'puls boundary': self._new_puls_panel_boundary.get,
'puls stiffener end': self._new_buckling_stf_end_support.get,
'puls sp or up': self._new_puls_sp_or_up.get,
'puls up boundary': self._new_puls_up_boundary.get}
set_var = obj_dict[var_to_set]()
if var_to_set == 'mat_yield':
set_var = set_var* 1e6
elif var_to_set in ['spacing','plate_thk','stf_web_height','stf_web_thk',
'stf_flange_width','stf_flange_thk', 'span']:
set_var = set_var/1000
no_of_lines = len(self._multiselect_lines)
for idx, line in enumerate(self._multiselect_lines):
self._active_line = line
self._line_is_active = True
if self._active_line in self._line_to_struc.keys():
# if self._active_line[self._active_line][0].Stiffener is not None:
# dict = self._line_to_struc[self._active_line][0].Stiffener.get_structure_prop()
# else:
# dict = self._line_to_struc[self._active_line][0].Plate.get_structure_prop()
prop_dict = self._line_to_struc[self._active_line][0].get_main_properties()
prop_dict['Plate'][var_to_set][0] = set_var
prop_dict['Stiffener'][var_to_set][0] = set_var
#dict[var_to_set][0] = set_var
self.new_structure(toggle_multi=prop_dict, suspend_recalc=True if (idx+1) != no_of_lines else False)
def gui_load_combinations(self,event):
'''
        Initializing and updating the GUI for load combinations.
The fields are located left of the Canvas.
:return:
'''
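        # Each load/line/compartment combination is keyed by a tuple and maps to two load
        # factor variables plus an include-checkbox variable. Hedged sketch (key names are
        # illustrative) of the bookkeeping used below:
        #   name = (combination, self._active_line, str(load))    # e.g. ('dnva', 'line12', 'ballast')
        #   lf_1, lf_2, included = self._new_load_comb_dict[name]  # DoubleVar, DoubleVar, IntVar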
if all([self._line_is_active,self._active_line in self._line_to_struc.keys(),
self._gui_functional_look == 'all items']):
lc_x, lc_x_delta, lc_y, lc_y_delta = 0.791666667, 0.026041667, 0.287037037, 0.023148148
#self._active_label.config(text=self._active_line)
combination = self._combination_slider_map[int(self._combination_slider.get())]
# removing label, checkbox and entry. setting created items to empty list.
[[item.destroy() for item in items] for items in
[self._lc_comb_created,self._comp_comb_created,self._manual_created, self._info_created]]
self._lc_comb_created, self._comp_comb_created, self._manual_created, self._info_created= [], [], [], []
if self._line_to_struc[self._active_line][0].Plate.get_structure_type() == '':
self._info_created.append(ttk.Label(self._main_fr, text='No structure type selected',
font=self._text_size["Text 10 bold"], ))
self._info_created[0].place(relx=lc_x , y = lc_y + 3*lc_y_delta)
elif self._line_to_struc[self._active_line][5] is not None:
pass
elif combination != 'Cylinder':
# creating new label, checkbox and entry. creating new list of created items.
# finding loads applied to lines
counter = 0
if len(self._load_dict) != 0 and combination !='manual':
for load, data in self._load_dict.items():
if self._active_line in self._load_dict[load][1] and data[0].get_limit_state() == 'ULS':
name = (combination,self._active_line,str(load)) #tuple to identify combinations on line
self._lc_comb_created.append(ttk.Label(self._main_fr, text = load,
font = self._text_size['Text 8 bold'],
))
self._lc_comb_created.append(ttk.Entry(self._main_fr,
textvariable =self._new_load_comb_dict[name][0],
width=5,
))
self._lc_comb_created.append(ttk.Entry(self._main_fr,
textvariable=self._new_load_comb_dict[name][1],
width=5,
))
self._lc_comb_created.append(ttk.Checkbutton(self._main_fr,
variable =self._new_load_comb_dict[name][2]))
for load_no in range(int(len(self._lc_comb_created)/4)):
self._lc_comb_created[0+load_no*4].place(relx=lc_x, rely=lc_y+lc_y_delta*load_no)
self._lc_comb_created[1+load_no*4].place(relx=lc_x+5*lc_x_delta, rely=lc_y+lc_y_delta*load_no)
self._lc_comb_created[2+load_no*4].place(relx=lc_x+6*lc_x_delta, rely=lc_y+lc_y_delta*load_no)
self._lc_comb_created[3+load_no*4].place(relx=lc_x+7*lc_x_delta, rely=lc_y+lc_y_delta*load_no)
counter += 1
            # finding tank loads applied to the line (automatically created compartments)
lc_y += 0.023148148*counter
counter = 0
if len(self._tank_dict) != 0 and combination !='manual':
for compartment in self.get_compartments_for_line(self._active_line):
name = (combination,self._active_line,'comp' + str(compartment)) #tuple to identify combinations on line
self._comp_comb_created.append(ttk.Label(self._main_fr, text='Compartment'+str(compartment),
))
self._comp_comb_created.append(ttk.Entry(self._main_fr,
textvariable=self._new_load_comb_dict[name][0],
width=5,
))
self._comp_comb_created.append(ttk.Entry(self._main_fr,
textvariable=self._new_load_comb_dict[name][1],
width=5,
))
self._comp_comb_created.append(ttk.Checkbutton(self._main_fr,
variable = self._new_load_comb_dict[name][2]))
for comp_no in range(int(len(self._comp_comb_created)/4)):
self._comp_comb_created[0+comp_no*4].place(relx=lc_x, rely=lc_y+lc_y_delta*comp_no)
self._comp_comb_created[1+comp_no*4].place(relx=lc_x+5*lc_x_delta, rely=lc_y+lc_y_delta*comp_no)
self._comp_comb_created[2+comp_no*4].place(relx=lc_x+6*lc_x_delta, rely=lc_y+lc_y_delta*comp_no)
self._comp_comb_created[3+comp_no*4].place(relx=lc_x+7*lc_x_delta, rely=lc_y+lc_y_delta*comp_no)
counter += 1
lc_y += 0.027777778*counter
# finding manual loads applied to the line
name = ('manual', self._active_line, 'manual') # tuple to identify combinations on line
if name in self._new_load_comb_dict.keys():
self._manual_created.append(ttk.Label(self._main_fr, text='Manual (pressure/LF)',
))
self._manual_created.append(
ttk.Entry(self._main_fr, textvariable=self._new_load_comb_dict[name][0], width=15,
))
self._manual_created.append(
ttk.Entry(self._main_fr, textvariable=self._new_load_comb_dict[name][1], width=6,
))
self._manual_created.append(ttk.Checkbutton(self._main_fr, variable=self._new_load_comb_dict[name][2]))
self._manual_created[0].place(relx=lc_x, rely=lc_y)
self._manual_created[1].place(relx=lc_x + 4 * lc_x_delta, rely=lc_y)
self._manual_created[2].place(relx=lc_x + 6 * lc_x_delta, rely=lc_y)
self._manual_created[3].place(relx=lc_x + 7 * lc_x_delta, rely=lc_y)
if self._line_to_struc[self._active_line][5] is None:
results = self.calculate_all_load_combinations_for_line(self._active_line)
self._result_label_dnva.config(text = 'DNV a [Pa]: ' + str(results['dnva']),
font = self._text_size['Text 8'])
self._result_label_dnvb.config(text = 'DNV b [Pa]: ' + str(results['dnvb']),
font = self._text_size['Text 8'])
self._result_label_tanktest.config(text = 'TT [Pa]: ' + str(results['tanktest']),
font = self._text_size['Text 8'])
self._result_label_manual.config(text = 'Manual [Pa]: ' + str(results['manual']))
lc_y = self.results_gui_start+0.018518519
self._result_label_dnva.place(relx = lc_x+0*lc_x_delta, rely = lc_y+lc_y_delta*1.5)
self._result_label_dnvb.place(relx=lc_x+4*lc_x_delta, rely=lc_y+lc_y_delta*1.5)
self._result_label_tanktest.place(relx=lc_x+0*lc_x_delta, rely=lc_y+2.4*lc_y_delta)
self._result_label_manual.place(relx=lc_x+4*lc_x_delta, rely=lc_y+2.4*lc_y_delta)
self._lab_pressure.place(relx=0.786458333, rely=self.results_gui_start)
else:
for item in [self._result_label_dnva,self._result_label_dnvb,
self._result_label_tanktest,self._result_label_manual,self._lab_pressure ]:
item.place_forget()
#self._combination_slider.set(4)
def slider_used(self, event):
'''
Action when slider is activated.
:return:
'''
self._canvas_scale = self._slider.get()
self.update_frame()
def grid_operations(self, line, coordinates):
'''
        Creating a grid in the canvas used for various calculations
:return:
'''
try:
if self._line_to_struc[line][0].Plate.get_structure_type() not in ('GENERAL_INTERNAL_NONWT','FRAME'):
self._pending_grid_draw[line] = coordinates
except KeyError:
pass
def grid_find_tanks(self, animate = False):
'''
        Searching the grid for enclosed compartments (tanks) and visualising the result.
:return:
'''
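        # Flow: the user-drawn lines are rasterised into self._main_grid, a breadth-first
        # search (search_bfs) identifies enclosed, watertight regions, and each detected
        # compartment becomes a tank with default load factor combinations added for every
        # line bounding it. The result can optionally be animated.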
if self._line_to_struc == {}:
            tk.messagebox.showerror('Search error','No geometry with properties exists.')
return
#setting the button to red
try:
img_file_name = 'img_int_pressure_button_search.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._int_button.config(image = photo)
self._int_button.image = photo
except TclError:
pass
animate = tk.messagebox.askquestion('Search for compartments','Searching for compartments will use a large matrix to '
'identify watertight members and consequently the '
'enclosed compartments. \n'
                                                            'You may animate the search for visualization and '
                                                            'increased understanding.\n'
'However, this will take some more time than just '
'showing the final result.\n'
'\n'
'Yes - Show search animation\n'
'No - Draw final result only\n'
'\n'
'Choose yes or no.' )
animate = True if animate == 'yes' else False
self._main_grid.clear()
self._tank_dict = {}
self._pending_grid_draw={}
self._compartments_listbox.delete(0,'end')
for line, points in self._line_dict.items():
            # registering the lines drawn by the user in the grid
p1 = self._point_dict['point'+str(points[0])]
p2 = self._point_dict['point'+str(points[1])]
self.grid_operations(line, [self.get_grid_coord_from_points_coords(p1),
self.get_grid_coord_from_points_coords(p2)])
self._grid_calc = grid_window.CreateGridWindow(self._main_grid, self._canvas_dim,
self._pending_grid_draw, self._canvas_base_origo)
compartment_search_return = self._grid_calc.search_bfs(animate=animate)
for comp_no, properties in compartment_search_return['compartments'].items():
# finding actual max min elevation from grid
min_el = (float('inf'), float('inf'))
max_el = (-float('inf'),-float('inf'))
if comp_no > 1:
self._compartments_listbox.insert('end', comp_no)
for corner in properties[1]:
corner_real = self.get_point_coords_from_grid_coords(corner)
if self.get_point_coords_from_grid_coords(corner)[1] < min_el[1]:
min_el = self.get_closest_point(corner_real)[1]
if self.get_point_coords_from_grid_coords(corner)[1] > max_el[1]:
max_el = self.get_closest_point(corner_real)[1]
self.new_tank(int(comp_no),properties[0], min_el, max_el)
comp_name = 'comp'+str(int(comp_no))
for combination in self._load_factors_dict.keys():
#creating the load factor combinations for tanks.
for line in self._line_dict.keys():
if comp_no in self.get_compartments_for_line(line):
name = (combination, line, comp_name)
self._new_load_comb_dict[name] = [tk.DoubleVar(), tk.DoubleVar(), tk.IntVar()]
self._new_load_comb_dict[name][0].set(self._load_factors_dict[combination][1])
self._new_load_comb_dict[name][1].set(self._load_factors_dict[combination][2])
self._new_load_comb_dict[name][2].set(1)
try:
img_file_name = 'img_int_pressure_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._int_button.config(image = photo)
self._int_button.image = photo
except TclError:
pass
if animate == False:
tank_count = None if len(self._tank_dict)==0 else len(self._tank_dict)
if tank_count is not None:
self._grid_calc.draw_grid(tank_count=tank_count)
else:
tank_count = None if len(self._tank_dict) == 0 else len(self._tank_dict)
if tank_count is not None:
self._grid_calc.animate_grid(grids_to_animate=compartment_search_return['grids'],
tank_count = None if len(self._tank_dict)==0 else len(self._tank_dict))
self.get_cob() # Calculating COB
self.update_frame()
def grid_display_tanks(self, save = False):
'''
        Opening matplotlib grid illustration
:return:
'''
try:
if self._grid_calc != None:
self._grid_calc.draw_grid(save = save,
tank_count=None if len(self._tank_dict)==0 else len(self._tank_dict) )
except RecursionError:
pass
def add_to_combinations_dict(self,line):
'''
        When creating a new line while tanks exist, the combinations dict must be updated.
:param line:
:return:
'''
if len(self._tank_dict) != 0:
for compartment in self.get_compartments_for_line(line):
for combination in self._load_factors_dict.keys():
name = (combination, line, 'comp'+str(compartment))
self._new_load_comb_dict[name] = [tk.DoubleVar(), tk.DoubleVar(), tk.IntVar()]
self._new_load_comb_dict[name][0].set(self._load_factors_dict[combination][1])
self._new_load_comb_dict[name][1].set(self._load_factors_dict[combination][2])
self._new_load_comb_dict[name][2].set(1)
else:
pass
name = ('manual', line, 'manual')
self._new_load_comb_dict[name] = [tk.DoubleVar(), tk.DoubleVar(), tk.IntVar()]
self._new_load_comb_dict[name][0].set(0)
self._new_load_comb_dict[name][1].set(0)
self._new_load_comb_dict[name][2].set(0)
def trace_shift_change(self, *args):
try:
self.update_frame()
except (TclError, ZeroDivisionError):
pass
def trace_acceptance_change(self, *args):
try:
self.update_frame()
for key, val in self._line_to_struc.items():
val[0].need_recalc = True
except (TclError, ZeroDivisionError):
pass
def update_frame(self, event = None, *args):
state = self.get_color_and_calc_state()
self.draw_results(state=state)
self.draw_canvas(state=state)
self.draw_prop()
#self.trace_puls_up_or_sp()
return state
def get_color_and_calc_state(self, current_line = None, active_line_only = False):
''' Return calculations and colors for line and results. '''
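        # Overview: for every line to be evaluated this method gathers design pressures,
        # minimum scantlings, RP-C201 buckling, PULS and ML-classifier results, fatigue and
        # weights, stores per-line colors/utilisations in return_dict, and logs the state in
        # self._state_logger; a line flagged as not needing recalculation returns its
        # previously logged state directly. It also builds the global color-code maps used
        # by the canvas drawing.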
return_dict = {'colors': {}, 'section_modulus': {}, 'thickness': {}, 'shear_area': {}, 'buckling': {},
'fatigue': {}, 'pressure_uls': {}, 'pressure_fls': {},
'all_obj': {}, 'scant_calc_obj': {}, 'fatigue_obj': {}, 'utilization': {}, 'slamming': {},
'color code': {}, 'PULS colors': {}, 'ML buckling colors' : {}, 'ML buckling class' : {},
'weights': {}, 'cylinder': {}}
return_dict['slamming'][current_line] = {}
if current_line is None and active_line_only:
line_iterator = [self._active_line, ]
elif current_line is None and not active_line_only and len(self._line_dict) != 0:
line_iterator = self._line_dict.keys()
elif current_line is not None:
line_iterator = [current_line, ]
elif current_line not in self._line_to_struc.keys() and active_line_only:
return return_dict
else:
return return_dict
rec_for_color = {}
# Cylinder general
all_cyl_thk, recorded_cyl_long_stf = list(), list()
for obj_list in self._line_to_struc.values():
if obj_list[5] is not None:
all_cyl_thk.append(round(obj_list[5].ShellObj.thk * 1000, 2))
recorded_cyl_long_stf.append(obj_list[5].LongStfObj.get_beam_string())
all_cyl_thk = np.unique(all_cyl_thk)
all_cyl_thk = np.sort(all_cyl_thk)
for current_line in line_iterator:
rec_for_color[current_line] = {}
slamming_pressure = 0
if current_line in self._line_to_struc.keys():
if self._line_to_struc[current_line][5] is not None:
cyl_obj = self._line_to_struc[current_line][5]
cyl_radius = round(cyl_obj.ShellObj.radius * 1000, 2)
cyl_thickness = round(cyl_obj.ShellObj.thk * 1000, 2)
cyl_long_str = cyl_obj.LongStfObj.get_beam_string()
cyl_ring_stf = cyl_obj.LongStfObj.get_beam_string()
cyl_heavy_ring = cyl_obj.LongStfObj.get_beam_string()
cyl_span = round(cyl_obj.ShellObj.dist_between_rings, 1)
cyl_tot_length = round(cyl_obj.ShellObj.length_of_shell, 1)
cyl_tot_cyl = round(cyl_obj.ShellObj.tot_cyl_length, 1)
cyl_sigma_axial = cyl_obj.sasd / 1e6
cyl_sigma_bend = cyl_obj.smsd / 1e6
cyl_sigma_tors = cyl_obj.tTsd / 1e6
cyl_tau_xy = cyl_obj.tQsd / 1e6
cyl_lat_press = cyl_obj.psd / 1e6
cyl_sigma_hoop = cyl_obj.shsd / 1e6
cyl_results = cyl_obj.get_utilization_factors()
else:
cyl_thickness = 0
all_obj = self._line_to_struc[current_line][0]
obj_scnt_calc_pl = all_obj.Plate #self._line_to_struc[current_line][1]
obj_scnt_calc_stf = all_obj.Stiffener # self._line_to_struc[current_line][1]
obj_scnt_calc_girder = all_obj.Girder # self._line_to_struc[current_line][1]
return_dict['all_obj'][current_line] = all_obj
if all_obj.need_recalc is False:
return self._state_logger[current_line]
try:
norm_and_slam = self.get_highest_pressure(current_line)
design_pressure = norm_and_slam['normal'] / 1000
if norm_and_slam['slamming'] is None:
pass
else:
slamming_dict = self.get_highest_pressure(current_line)
slamming_pressure = slamming_dict['slamming']
slamming_red_fac_pl = slamming_dict['slamming plate reduction factor']
slamming_red_fac_stf = slamming_dict['slamming stf reduction factor']
except KeyError:
design_pressure = 0
min_thk = obj_scnt_calc_pl.get_dnv_min_thickness(design_pressure)
color_thk = 'green' if obj_scnt_calc_pl.is_acceptable_pl_thk(design_pressure) else 'red'
rec_for_color[current_line]['plate thickness'] = (min_thk / 1000) / obj_scnt_calc_pl.get_pl_thk()
all_obj.lat_press = design_pressure/1000
buckling = all_obj.plate_buckling()
all_buckling_uf_list = list()
for buc_domain, domain_results in buckling.items():
for uf_text, uf_num in domain_results.items():
if buc_domain != 'Local buckling':
all_buckling_uf_list.append(uf_num)
color_buckling = 'green' if all([uf <= 1 for uf in all_buckling_uf_list]) \
else 'red'
rec_for_color[current_line]['rp buckling'] = max(all_buckling_uf_list)
if obj_scnt_calc_stf is not None:
sec_mod = [obj_scnt_calc_stf.get_section_modulus()[0],
obj_scnt_calc_stf.get_section_modulus ()[1]]
shear_area = obj_scnt_calc_stf.get_shear_area()
min_shear = obj_scnt_calc_stf.get_minimum_shear_area(design_pressure)
min_sec_mod = obj_scnt_calc_stf.get_dnv_min_section_modulus(design_pressure)
rec_for_color[current_line]['section modulus'] = min_sec_mod / min(sec_mod)
rec_for_color[current_line]['shear'] = min_shear/shear_area
return_dict['slamming'][current_line] = dict()
if slamming_pressure is not None and slamming_pressure > 0:
return_dict['slamming'][current_line]['state'] = True
else:
return_dict['slamming'][current_line]['state'] = False
try:
fatigue_obj = self._line_to_struc[current_line][2]
p_int = self.get_fatigue_pressures(current_line, fatigue_obj.get_accelerations())['p_int']
p_ext = self.get_fatigue_pressures(current_line, fatigue_obj.get_accelerations())['p_ext']
damage = fatigue_obj.get_total_damage(int_press=(p_int['loaded'], p_int['ballast'],
p_int['part']), ext_press=(p_ext['loaded'],
p_ext['ballast'],
p_ext['part']))
dff = fatigue_obj.get_dff()
color_fatigue = 'green' if damage * dff <= 1 else 'red'
except AttributeError:
fatigue_obj, p_int, p_ext, damage, dff = [None for dummy in range(5)]
color_fatigue = 'green'
color_sec = 'green' if obj_scnt_calc_stf.is_acceptable_sec_mod(sec_mod, design_pressure) \
else 'red'
color_shear = 'green' if obj_scnt_calc_stf.is_acceptable_shear_area(shear_area, design_pressure) \
else 'red'
else:
sec_mod = [0,0]
rec_for_color[current_line]['section modulus'] = 0.0
rec_for_color[current_line]['shear'] = 0
return_dict['slamming'][current_line] = dict()
fatigue_obj, p_int, p_ext, damage, dff = [None for dummy in range(5)]
color_sec = 'green' if all_obj.Stiffener is None else 'black'
color_shear = 'green' if all_obj.Stiffener is None else'black'
return_dict['slamming'][current_line]['state'] = False
if slamming_pressure is not None and slamming_pressure > 0 and obj_scnt_calc_stf is not None:
slamming_res = obj_scnt_calc_stf.calculate_slamming_stiffener(slamming_pressure,
red_fac=slamming_red_fac_pl)
min_pl_slamming = obj_scnt_calc_stf.calculate_slamming_plate(slamming_pressure,
red_fac=slamming_red_fac_stf)
if slamming_res['Zp_req'] is not None:
zpl = obj_scnt_calc_stf.get_net_effective_plastic_section_modulus()
zpl_req = slamming_res['Zp_req']
color_sec = 'green' if zpl >= zpl_req else 'red'
else:
zpl = obj_scnt_calc_stf.get_net_effective_plastic_section_modulus()
zpl_req = None
color_sec = 'red'
color_shear = 'green' if round(obj_scnt_calc_stf.get_web_thk()* 1000,1) >= \
round(slamming_res['tw_req'],1) else 'red'
color_thk = 'green' if round(obj_scnt_calc_stf.get_pl_thk() * 1000,1) >= \
round(min_pl_slamming,1) else 'red'
return_dict['slamming'][current_line]['zpl'] = zpl
return_dict['slamming'][current_line]['zpl_req'] = zpl_req
return_dict['slamming'][current_line]['min_plate_thk'] = min_pl_slamming
return_dict['slamming'][current_line]['min_web_thk'] = slamming_res['tw_req']
return_dict['colors'][current_line] = {'buckling': color_buckling, 'fatigue': color_fatigue,
'section': color_sec, 'shear': color_shear,
'thickness': color_thk}
'''
Cylinder calculations
'''
if self._line_to_struc[current_line][5] is not None:
return_dict['cylinder'][current_line] = cyl_results
'''
PULS calculations
'''
if self._PULS_results != None:
res = self._PULS_results.get_puls_line_results(current_line)
if res is not None:
geo_problem = False
if type(res['Ultimate capacity']['Actual usage Factor'][0]) != str:
ufnum = res['Ultimate capacity']['Actual usage Factor'][0] / self._new_puls_uf.get()
rec_for_color[current_line]['PULS ultimate']=ufnum
col_ult = 'green' if ufnum < 1 else 'red'
else:
geo_problem = True
col_ult = 'red'
if res['Buckling strength']['Actual usage Factor'][0] is not None:
bnum = res['Buckling strength']['Actual usage Factor'][0] / self._new_puls_uf.get()
rec_for_color[current_line]['PULS buckling'] = bnum
col_buc = 'green' if bnum < 1 else 'red'
else:
col_buc = 'red'
if geo_problem:
loc_geom = 'red'
else:
if obj_scnt_calc_stf is None:
loc_label = 'Geom. Req (PULS validity limits)'
else:
loc_label = 'Local geom req (PULS validity limits)' if \
obj_scnt_calc_stf.get_puls_sp_or_up() == 'SP' else 'Geom. Req (PULS validity limits)'
loc_geom = 'green' if all([val[0] == 'Ok' for val in res[loc_label].values()]) else 'red'
if obj_scnt_calc_stf is None:
csr_label = 'CSR-Tank req'
else:
csr_label = 'CSR-Tank requirements (primary stiffeners)' if \
obj_scnt_calc_stf.get_puls_sp_or_up() == 'SP' else'CSR-Tank req'
csr_geom = 'green' if all([val[0] in ['Ok', '-'] for val in res[csr_label].values()]) else 'red'
return_dict['PULS colors'][current_line] = {'ultimate': col_ult, 'buckling': col_buc,
'local geometry': loc_geom, 'csr': csr_geom}
else:
return_dict['PULS colors'][current_line] = {'ultimate': 'black', 'buckling': 'black',
'local geometry': 'black', 'csr': 'black'}
else:
return_dict['PULS colors'][current_line] = {'ultimate': 'black', 'buckling': 'black',
'local geometry': 'black', 'csr': 'black'}
'''
Machine learning buckling
['cl SP buc int predictor', 'cl SP buc int scaler',
'cl SP ult int predictor', 'cl SP ult int scaler',
'cl SP buc GLGT predictor', 'cl SP buc GLGT scaler',
'cl SP ult GLGT predictor', 'cl SP ult GLGT scaler']
'''
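                # The ML classifiers appear to return a class label per panel where class 9
                # denotes an acceptable (non-critical) panel; anything else is flagged red.
                # Missing predictors (None) default the prediction to 0, i.e. red.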
if obj_scnt_calc_pl.get_puls_sp_or_up() == 'UP':
buckling_ml_input = obj_scnt_calc_pl.get_buckling_ml_input(design_lat_press=design_pressure)
if obj_scnt_calc_pl.get_puls_boundary() == 'Int':
if self._ML_buckling['cl UP buc int predictor'] != None:
x_buc = self._ML_buckling['cl UP buc int scaler'].transform(buckling_ml_input)
y_pred_buc = self._ML_buckling['cl UP buc int predictor'].predict(x_buc)[0]
else:
y_pred_buc = 0
if self._ML_buckling['cl UP ult int predictor'] != None:
x_ult = self._ML_buckling['cl UP ult int scaler'].transform(buckling_ml_input)
y_pred_ult = self._ML_buckling['cl UP ult int predictor'].predict(x_ult)[0]
else:
y_pred_ult = 0
else:
if self._ML_buckling['cl UP buc GLGT predictor'] != None:
x_buc = self._ML_buckling['cl UP buc GLGT scaler'].transform(buckling_ml_input)
y_pred_buc = self._ML_buckling['cl UP buc GLGT predictor'].predict(x_buc)[0]
else:
y_pred_buc = 0
if self._ML_buckling['cl UP ult GLGT predictor'] != None:
x_ult = self._ML_buckling['cl UP ult GLGT scaler'].transform(buckling_ml_input)
y_pred_ult = self._ML_buckling['cl UP ult GLGT predictor'].predict(x_ult)[0]
else:
y_pred_ult = 0
x_csr = obj_scnt_calc_pl.get_buckling_ml_input(design_lat_press=design_pressure, csr = True)
x_csr = self._ML_buckling['CSR scaler UP'].transform(x_csr)
csr_pl = self._ML_buckling['CSR predictor UP'].predict(x_csr)[0]
return_dict['ML buckling colors'][current_line] = \
{'buckling': 'green' if int(y_pred_buc) == 9 else 'red',
'ultimate': 'green' if int(y_pred_ult) == 9 else 'red',
'CSR requirement': 'green' if csr_pl == 1 else 'red'}
return_dict['ML buckling class'][current_line] = {'buckling': int(y_pred_buc),
'ultimate': int(y_pred_ult),
'CSR': [csr_pl, float('inf'),
float('inf'), float('inf')]}
else:
buckling_ml_input = obj_scnt_calc_stf.get_buckling_ml_input(design_lat_press=design_pressure)
if obj_scnt_calc_stf.get_puls_boundary() == 'Int':
if self._ML_buckling['cl SP buc int predictor'] != None:
x_buc = self._ML_buckling['cl SP buc int scaler'].transform(buckling_ml_input)
y_pred_buc = self._ML_buckling['cl SP buc int predictor'].predict(x_buc)[0]
else:
y_pred_buc = 0
if self._ML_buckling['cl SP ult int predictor'] != None:
x_ult = self._ML_buckling['cl SP ult int scaler'].transform(buckling_ml_input)
y_pred_ult = self._ML_buckling['cl SP ult int predictor'].predict(x_ult)[0]
else:
y_pred_ult = 0
else:
if self._ML_buckling['cl SP buc GLGT predictor'] != None:
x_buc = self._ML_buckling['cl SP buc GLGT scaler'].transform(buckling_ml_input)
y_pred_buc = self._ML_buckling['cl SP buc GLGT predictor'].predict(x_buc)[0]
else:
y_pred_buc = 0
if self._ML_buckling['cl SP ult GLGT predictor'] != None:
x_ult = self._ML_buckling['cl SP ult GLGT scaler'].transform(buckling_ml_input)
y_pred_ult = self._ML_buckling['cl SP ult GLGT predictor'].predict(x_ult)[0]
else:
y_pred_ult = 0
x_csr = obj_scnt_calc_stf.get_buckling_ml_input(design_lat_press=design_pressure, csr = True)
x_csr = self._ML_buckling['CSR scaler SP'].transform(x_csr)
csr_pl, csr_web, csr_web_fl, csr_fl = self._ML_buckling['CSR predictor SP'].predict(x_csr)[0]
return_dict['ML buckling colors'][current_line] = \
{'buckling': 'green' if int(y_pred_buc) == 9 else 'red',
'ultimate': 'green' if int(y_pred_ult) == 9 else 'red',
'CSR requirement': 'green' if
all([csr_pl == 1, csr_web == 1, csr_web_fl == 1, csr_fl == 1]) else 'red'}
return_dict['ML buckling class'][current_line] = {'buckling': int(y_pred_buc),
'ultimate': int(y_pred_ult),
'CSR': [csr_pl, csr_web, csr_web_fl, csr_fl]}
'''
Weight calculations for line.
'''
# TODO only works for stiffened panel!
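                # Line weight from op.calc_weight (spacing, plate thickness, web h/t,
                # flange w/t, span, girder length) is later combined with the line midpoint
                # to build a weight-based steel COG: COG = sum(w_i * mid_i) / sum(w_i).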
if obj_scnt_calc_stf is not None:
line_weight = op.calc_weight([obj_scnt_calc_stf.get_s(), obj_scnt_calc_stf.get_pl_thk(),
obj_scnt_calc_stf.get_web_h(), obj_scnt_calc_stf.get_web_thk(),
obj_scnt_calc_stf.get_fl_w(), obj_scnt_calc_stf.get_fl_thk(),
obj_scnt_calc_stf.get_span(), obj_scnt_calc_stf.get_lg()])
else:
line_weight = 0
points = self._line_dict[current_line]
p1 = self._point_dict['point' + str(points[0])]
p2 = self._point_dict['point' + str(points[1])]
mid_coord = [(p1[0]+p2[0])/2, (p1[1]+p2[1])/2]
return_dict['weights'][current_line] = {'line weight': line_weight, 'mid_coord': mid_coord}
'''
                Collecting the per-line results into the return dictionary.
'''
return_dict['buckling'][current_line] = buckling
return_dict['pressure_uls'][current_line] = design_pressure
return_dict['pressure_fls'][current_line] = {'p_int': p_int, 'p_ext': p_ext}
return_dict['section_modulus'][current_line] = {'sec_mod': sec_mod, 'min_sec_mod': 0} if \
obj_scnt_calc_stf is None else {'sec_mod': sec_mod, 'min_sec_mod': min_sec_mod}
return_dict['shear_area'][current_line] = {'shear_area': 0, 'min_shear_area': 0} if \
obj_scnt_calc_stf is None else{'shear_area': shear_area, 'min_shear_area': min_shear}
return_dict['thickness'][current_line] = {'thk': obj_scnt_calc_pl.get_pl_thk(), 'min_thk': min_thk}
return_dict['fatigue_obj'][current_line] = fatigue_obj
return_dict['color code'][current_line] = {}
if fatigue_obj is not None:
return_dict['fatigue'][current_line] = {'damage': damage, 'dff': dff,
'curve': fatigue_obj.get_sn_curve()}
rec_for_color[current_line]['fatigue'] = damage*dff
else:
return_dict['fatigue'][current_line] = {'damage': None, 'dff': None, 'curve': None}
rec_for_color[current_line]['fatigue'] = 0
fat_util = 0 if damage is None else damage * dff
shear_util = 0 if shear_area == 0 else min_shear / shear_area
thk_util = 0 if obj_scnt_calc_pl.get_pl_thk() == 0 else min_thk / (1000 * obj_scnt_calc_pl.get_pl_thk())
sec_util = 0 if min(sec_mod) == 0 else min_sec_mod / min(sec_mod)
buc_util = 1 if float('inf') in buckling else max(all_buckling_uf_list)
rec_for_color[current_line]['rp buckling'] = max(all_buckling_uf_list)
return_dict['utilization'][current_line] = {'buckling': buc_util,
'PULS buckling': buc_util,
'fatigue': fat_util,
'section': sec_util,
'shear': shear_util,
'thickness': thk_util}
# Color coding state
self._state_logger[current_line] = return_dict # Logging the current state of the line.
self._line_to_struc[current_line][0].need_recalc = False
else:
pass
sec_in_model, idx, recorded_sections = dict(), 0, list()
cyl_sec_in_model, idx_cyl, recorded_cyl_sections = dict(), 0, list()
for data in self._line_to_struc.values():
if data[0].Stiffener is not None:
if data[0].Stiffener.get_beam_string() not in recorded_sections:
sec_in_model[data[0].Stiffener.get_beam_string()] = idx
recorded_sections.append(data[0].Stiffener.get_beam_string())
idx += 1
if data[5] is not None:
if data[5].LongStfObj.get_beam_string() not in recorded_cyl_sections:
cyl_sec_in_model[ data[5].LongStfObj.get_beam_string()] = idx_cyl
recorded_cyl_sections.append(data[5].LongStfObj.get_beam_string())
idx_cyl += 1
sec_in_model['length'] = len(recorded_sections)
cyl_sec_in_model['length'] = len(recorded_cyl_sections)
if self._line_to_struc != {}:
sec_mod_map = np.arange(0,1.1,0.1)
fat_map = np.arange(0,1.1,0.1)
all_thicknesses = [round(objs[0].Plate.get_pl_thk(), 5) for objs in self._line_to_struc.values()]
all_thicknesses = np.unique(all_thicknesses).tolist()
thickest_plate = max(all_thicknesses)
if len(all_thicknesses) > 1:
thk_map = np.arange(min(all_thicknesses), max(all_thicknesses) + (max(all_thicknesses) -
min(all_thicknesses)) / 10,
(max(all_thicknesses) - min(all_thicknesses)) / 10)
else:
thk_map = all_thicknesses
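            # The *_map arrays built below are 10 evenly spaced values between the minimum
            # and maximum recorded value (or the single value itself); they appear to be used
            # for the color-code legend, while individual line colors further down are derived
            # either from the value itself or from its position in the corresponding sorted
            # unique-value list.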
# if self._line_to_struc[current_line][5] is not None:
# all_cyl_thk = all_cyl_thk.tolist()
# if len(all_cyl_thk) > 1:
# thk_map_cyl = np.arange(min(all_cyl_thk), max(all_cyl_thk) + (max(all_cyl_thk) -
# min(all_cyl_thk)) / 10,
# (max(all_cyl_thk) - min(all_cyl_thk)) / 10)
# else:
# thk_map_cyl = all_cyl_thk
# else:
# thk_map_cyl = [1,]
try:
all_pressures = sorted([self.get_highest_pressure(line)['normal']
for line in list(self._line_dict.keys())])
except KeyError:
all_pressures = [0, 1]
all_pressures = np.unique(all_pressures).tolist()
highest_pressure, lowest_pressure = max(all_pressures), min(all_pressures)
if len(all_pressures) > 1:
press_map = [round(val, 1) for val in
np.arange(all_pressures[0], all_pressures[-1],
(all_pressures[-1] - all_pressures[0]) / 10)] + \
[round(all_pressures[-1], 1)]
else:
press_map = all_pressures
all_utils = [max(list(return_dict['utilization'][line].values()))
for line in self._line_to_struc.keys()]
all_utils = np.unique(all_utils).tolist()
if len(all_utils) >1:
util_map = np.arange(0, 1.1, 0.1)
else:
util_map = all_utils
if self._PULS_results is not None:
#puls_util_map = self._PULS_results.all_uf
puls_util_map = list()
for key, val in self._line_to_struc.items():
puls_util_map.append(self._PULS_results.get_utilization(key, val[0].Plate.get_puls_method(),
acceptance = self._new_puls_uf.get()))
puls_util_map = np.arange(0, 1.1, 0.1)
else:
puls_util_map = None
sig_x = np.unique([self._line_to_struc[line][0].Plate.get_sigma_x1() for line in
self._line_to_struc.keys()]).tolist()
if len(sig_x) > 1: # TODO color coding when using sig_x1 and sig_x2 (23.12.2021)
sig_x_map = np.arange(min(sig_x), max(sig_x) + (max(sig_x) - min(sig_x)) / 10,
(max(sig_x) - min(sig_x)) / 10)
else:
sig_x_map = sig_x
sig_y1 = np.unique([self._line_to_struc[line][0].Plate.get_sigma_y1() for line in
self._line_to_struc.keys()]).tolist()
if len(sig_y1) > 1:
sig_y1_map = np.arange(min(sig_y1), max(sig_y1) + (max(sig_y1) - min(sig_y1)) / 10,
(max(sig_y1) - min(sig_y1)) / 10)
else:
sig_y1_map = sig_y1
sig_y2 = np.unique([self._line_to_struc[line][0].Plate.get_sigma_y2() for line in
self._line_to_struc.keys()]).tolist()
if len(sig_y2) > 1:
sig_y2_map = np.arange(min(sig_y2), max(sig_y2) + (max(sig_y2) - min(sig_y2)) / 10,
(max(sig_y2) - min(sig_y2)) / 10)
else:
sig_y2_map = sig_y2
tau_xy = np.unique([self._line_to_struc[line][0].Plate.get_tau_xy() for line in
self._line_to_struc.keys()]).tolist()
if len(tau_xy) > 1:
tau_xy_map = np.arange(min(tau_xy), max(tau_xy) + (max(tau_xy) - min(tau_xy)) / 10,
(max(tau_xy) - min(tau_xy)) / 10)
else:
tau_xy_map = tau_xy
spacings = list()
for line in self._line_to_struc.keys():
if self._line_to_struc[line][0].Stiffener is not None:
spacings.append(self._line_to_struc[line][0].Stiffener.get_s())
spacing = np.unique(spacings).tolist()
structure_type = [self._line_to_struc[line][0].Plate.get_structure_type() for line in
self._line_to_struc.keys()]
return_dict['color code'] = {'thickest plate': thickest_plate, 'thickness map': thk_map,
'all thicknesses': all_thicknesses, 'all cyl thicknesses': all_cyl_thk,
'section modulus map': sec_mod_map,
'fatigue map': fat_map,
'highest pressure': highest_pressure, 'lowest pressure': lowest_pressure,
'pressure map': press_map, 'all pressures':all_pressures,
'all utilizations': all_utils, 'utilization map': util_map,
'PULS utilization map': puls_util_map,
'max sigma x': max(sig_x), 'min sigma x': min(sig_x), 'sigma x map': sig_x_map,
'max sigma y1': max(sig_y1), 'min sigma y1': min(sig_y1),
'sigma y1 map': sig_y1_map,
'max sigma y2': max(sig_y2), 'min sigma y2': min(sig_y2),
'sigma y2 map': sig_y2_map,
'max tau xy': max(tau_xy), 'min tau xy': min(tau_xy), 'tau xy map': tau_xy_map,
'structure types map': np.unique(structure_type).tolist(),
'sections in model': sec_in_model,
'cyl sections in model': cyl_sec_in_model,
'recorded sections': recorded_sections,
'recorded cylinder long sections' : recorded_cyl_sections,
'spacings': spacing, 'max spacing': max(spacing), 'min spacing': min(spacing)}
line_color_coding, puls_method_map, puls_sp_or_up_map = \
{}, {None: 0, 'buckling': 0.5, 'ultimate': 1}, {None:0, 'SP': 0.5, 'UP': 1}
cmap_sections = plt.get_cmap('jet')
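            # Each per-line color below is produced by sampling the 'jet' colormap with a
            # normalised value in [0, 1] and converting it to a hex string, e.g. (illustrative):
            #   matplotlib.colors.rgb2hex(cmap_sections(0.0))  -> dark blue (low utilisation)
            #   matplotlib.colors.rgb2hex(cmap_sections(1.0))  -> dark red  (high utilisation)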
thk_sort_unique = return_dict['color code']['all thicknesses']
spacing_sort_unique = return_dict['color code']['spacings']
structure_type_unique = return_dict['color code']['structure types map']
tot_weight, weight_mult_dist_x, weight_mult_dist_y = 0, 0,0
for line, line_data in self._line_to_struc.items():
if self._PULS_results is None:
puls_color, buc_uf, puls_uf, puls_method, puls_sp_or_up = 'black', 0, 0, None, None
elif self._PULS_results.get_utilization(line, self._line_to_struc[line][0].Plate.get_puls_method(),
self._new_puls_uf.get()) == None:
puls_color, buc_uf, puls_uf, puls_method, puls_sp_or_up = 'black', 0,0, None, None
else:
puls_method = self._line_to_struc[line][0].Plate.get_puls_method()
puls_uf = self._PULS_results.get_utilization(
line, puls_method,
self._new_puls_uf.get())
puls_color = matplotlib.colors.rgb2hex(cmap_sections(puls_uf))
puls_sp_or_up = self._line_to_struc[line][0].Plate.get_puls_sp_or_up()
# Cylinders
if self._line_to_struc[line][5] is not None:
cyl_obj = self._line_to_struc[line][5]
cyl_radius = round(cyl_obj.ShellObj.radius * 1000, 2)
cyl_thickness = round(cyl_obj.ShellObj.thk * 1000, 2)
cyl_long_str = cyl_obj.LongStfObj.get_beam_string()
cyl_ring_stf = cyl_obj.LongStfObj.get_beam_string()
cyl_heavy_ring = cyl_obj.LongStfObj.get_beam_string()
cyl_span = round(cyl_obj.ShellObj.dist_between_rings, 1)
cyl_tot_length = round(cyl_obj.ShellObj.length_of_shell, 1)
cyl_tot_cyl = round(cyl_obj.ShellObj.tot_cyl_length, 1)
cyl_sigma_axial = cyl_obj.sasd / 1e6
cyl_sigma_bend = cyl_obj.smsd / 1e6
cyl_sigma_tors = cyl_obj.tTsd / 1e6
tau_xy = cyl_obj.tQsd / 1e6
cyl_lat_press = cyl_obj.psd / 1e6
cyl_sigma_hoop = cyl_obj.shsd / 1e6
cyl_results = cyl_obj.get_utilization_factors()
cyl_uf = max([round(0 if cyl_results['Unstiffened shell'] is None else
cyl_results['Unstiffened shell'],2),
round(0 if cyl_results['Longitudinal stiffened shell'] is None else
cyl_results['Longitudinal stiffened shell'],2),
round(0 if cyl_results['Ring stiffened shell'] is None else
cyl_results['Ring stiffened shell'],2),
round(0 if cyl_results['Heavy ring frame'] is None else
cyl_results['Heavy ring frame'],2)])
else:
cyl_uf = 0
cyl_long_str = ' '
cyl_long_str = None
cyl_thickness = None
rp_uf = rec_for_color[line]['rp buckling']
tot_uf_rp = max([rec_for_color[line]['fatigue'], rp_uf,
rec_for_color[line]['section modulus'], rec_for_color[line]['shear'],
rec_for_color[line]['plate thickness']])
tot_uf_puls = max([rec_for_color[line]['fatigue'], puls_uf,
rec_for_color[line]['section modulus'], rec_for_color[line]['shear'],
rec_for_color[line]['plate thickness']])
try:
this_pressure = self.get_highest_pressure(line)['normal']
except KeyError:
this_pressure = 0
rp_util = max(list(return_dict['utilization'][line].values()))
res = list()
for stress_list, this_stress in zip([sig_x, sig_y1, sig_y2, tau_xy],
[line_data[0].Plate.get_sigma_x1(), line_data[0].Plate.get_sigma_y1(),
line_data[0].Plate.get_sigma_y2(), line_data[0].Plate.get_tau_xy()]):
if type(stress_list)==float:
res.append(1)
elif len(stress_list) == 1:
res.append(1)
elif max(stress_list) == 0 and min(stress_list) == 0:
res.append(0)
elif this_stress < 0:
res.append(this_stress /min(stress_list))
elif this_stress >= 0:
res.append(this_stress/ max(stress_list))
sig_x_uf, sig_y1_uf, sig_y2_uf , tau_xy_uf = res
if type(all_cyl_thk) is not list:
all_cyl_thk = all_cyl_thk.tolist()
line_color_coding[line] = {'plate': matplotlib.colors.rgb2hex(cmap_sections(
thk_sort_unique.index(round(line_data[0].Plate.get_pl_thk(),10))/len(thk_sort_unique))),
'spacing': 'black' if line_data[0].Stiffener is None else matplotlib.colors.rgb2hex(
cmap_sections(spacing_sort_unique.index(round(line_data[0].Stiffener
.get_s(), 10)) / len(
spacing_sort_unique))),
'section': 'black' if line_data[0].Stiffener is None else
matplotlib.colors.rgb2hex(cmap_sections(sec_in_model[line_data[0]
.Stiffener.get_beam_string()]/
len(list(recorded_sections)))),
'section cyl': 'black' if cyl_long_str is None else
matplotlib.colors.rgb2hex(cmap_sections(cyl_sec_in_model[cyl_long_str] /
len(list(recorded_cyl_sections)))),
'structure type': matplotlib.colors.rgb2hex(
cmap_sections(structure_type_unique.index(line_data[0].Plate.get_structure_type())
/len(structure_type_unique))),
'pressure color': 'black' if all_pressures in [[0],[0,1]] else matplotlib.colors.rgb2hex(cmap_sections(
this_pressure/highest_pressure)),
'pressure': this_pressure,
'rp uf color': matplotlib.colors.rgb2hex(cmap_sections(rp_util)),
'rp uf': rp_util,
'PULS method': puls_method,
'PULS sp or up':puls_sp_or_up,
'section modulus color': matplotlib.colors.rgb2hex(
cmap_sections(rec_for_color[line]['section modulus'])),
'fatigue color': matplotlib.colors.rgb2hex(
cmap_sections(rec_for_color[line]['fatigue'])),
'Total uf color rp' : matplotlib.colors.rgb2hex(
cmap_sections(tot_uf_rp)),
'Total uf rp': tot_uf_rp,
'Total uf color puls': matplotlib.colors.rgb2hex(
cmap_sections(tot_uf_puls)),
'Total uf puls': tot_uf_puls,
'PULS uf': round(puls_uf,2),
'PULS uf color': puls_color,
'fatigue uf' : rec_for_color[line]['fatigue'],
'section uf' : rec_for_color[line]['section modulus'],
'sigma x': matplotlib.colors.rgb2hex(cmap_sections(sig_x_uf)),
'sigma y1': matplotlib.colors.rgb2hex(cmap_sections(sig_y1_uf)),
'sigma y2': matplotlib.colors.rgb2hex(cmap_sections(sig_y2_uf)),
'tau xy':matplotlib.colors.rgb2hex(cmap_sections(tau_xy_uf)),
'cylinder uf': matplotlib.colors.rgb2hex(cmap_sections(cyl_uf)),
'cylinder plate' : matplotlib.colors.rgb2hex
(cmap_sections(0 if cyl_thickness is None else all_cyl_thk.index(cyl_thickness)/len(all_cyl_thk)))
}
return_dict['color code']['lines'] = line_color_coding
# COG calculations
# Steel
tot_weight += return_dict['weights'][line]['line weight']
weight_mult_dist_x += return_dict['weights'][line]['line weight']\
*return_dict['weights'][line]['mid_coord'][0]
weight_mult_dist_y += return_dict['weights'][line]['line weight']\
*return_dict['weights'][line]['mid_coord'][1]
            tot_cog = [0, 0] if tot_weight == 0 else \
                [weight_mult_dist_x / tot_weight, weight_mult_dist_y / tot_weight]
else:
tot_cog = [0,0]
tot_weight = 0
return_dict['COG'] = tot_cog
return_dict['Total weight'] = tot_weight
return return_dict
def draw_canvas(self, state = None, event = None):
'''
Canvas is drawn here.
'''
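        # Drawing order: coordinate axes (optionally shifted), steel COG and COB markers,
        # color-code legend text if any color-code checkbox is active, the optional shortcut
        # backdrop, all points with names/coordinates, and finally the lines colored
        # according to the selected buckling method (PULS / RP-C201 / ML-CL) or the active
        # color coding.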
self._main_canvas.delete('all')
color = 'black' #by default
# Drawing the shifted lines
if any([self._new_shift_viz_coord_hor.get()!=0, self._new_shift_viz_coord_ver.get()!= 0]) and self._new_shifted_coords.get():
self._main_canvas.create_line(self._canvas_draw_origo[0]+self._canvas_scale*self._new_shift_viz_coord_hor.get()/1000, 0,
self._canvas_draw_origo[0]+self._canvas_scale*self._new_shift_viz_coord_hor.get()/1000,
self._canvas_dim[1] + 500,
stipple='gray50', fill = 'peru')
self._main_canvas.create_line(0, self._canvas_draw_origo[1]-self._canvas_scale*self._new_shift_viz_coord_ver.get()/1000,
self._canvas_dim[0] + 500,
self._canvas_draw_origo[1]-self._canvas_scale*self._new_shift_viz_coord_ver.get()/1000,
stipple='gray50', fill = 'peru')
else:
# Drawing lines at (0, 0)
self._main_canvas.create_line(self._canvas_draw_origo[0], 0, self._canvas_draw_origo[0], self._canvas_dim[1]+500,
stipple= 'gray50', fill=self._color_text)
self._main_canvas.create_line(0, self._canvas_draw_origo[1], self._canvas_dim[0] +500, self._canvas_draw_origo[1],
stipple='gray50', fill=self._color_text)
self._main_canvas.create_text(self._canvas_draw_origo[0] - 30 * 1,
self._canvas_draw_origo[1] + 12 * 1, text='(0,0)',
font='Text 10', fill = self._color_text)
# Drawing COG and COB
if self._new_show_cog.get():
pt_size = 5
if 'COG' in state.keys():
if self._new_shifted_coords.get():
point_coord_x = self._canvas_draw_origo[0] + (state['COG'][0] +
self._new_shift_viz_coord_hor.get()/1000) * \
self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - (state['COG'][1] +
self._new_shift_viz_coord_ver.get()/1000) * \
self._canvas_scale
else:
point_coord_x = self._canvas_draw_origo[0] + state['COG'][0]*self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - state['COG'][1]*self._canvas_scale
self._main_canvas.create_oval(point_coord_x - pt_size + 2,
point_coord_y - pt_size + 2,
point_coord_x + pt_size + 2,
point_coord_y + pt_size + 2, fill='yellow')
self._main_canvas.create_text(point_coord_x + 5,
point_coord_y - 14, text='steel COG: x=' + str(round(state['COG'][0], 2)) +
' y=' +str(round(state['COG'][1],2)),
fill = self._color_text)
if self._center_of_buoyancy != {}:
for draft, cob in self._center_of_buoyancy.items():
if self._new_shifted_coords.get():
point_coord_x = self._canvas_draw_origo[0] + (cob[1] +
self._new_shift_viz_coord_hor.get() / 1000) * \
self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - (cob[0] +
self._new_shift_viz_coord_ver.get() / 1000) * \
self._canvas_scale
else:
point_coord_x = self._canvas_draw_origo[0] + cob[1] * self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - cob[0] * self._canvas_scale
self._main_canvas.create_oval(point_coord_x - pt_size + 2,
point_coord_y - pt_size + 2,
point_coord_x + pt_size + 2,
point_coord_y + pt_size + 2, fill='blue')
self._main_canvas.create_text(point_coord_x + 5,
point_coord_y + 14,
text='COB d='+str(draft) +': x=' + str(round(cob[1], 2)) +
' y=' + str(round(cob[0], 2)),
font=self._text_size["Text 8"], fill='blue')
chk_box_active = [self._new_colorcode_beams.get(), self._new_colorcode_plates.get(),
self._new_colorcode_pressure.get(), self._new_colorcode_utilization.get(),
self._new_colorcode_sigmax.get(), self._new_colorcode_sigmay1.get(), self._new_colorcode_sigmay2.get(),
self._new_colorcode_tauxy.get(), self._new_colorcode_structure_type.get(),
self._new_colorcode_fatigue.get(), self._new_colorcode_section_modulus.get(),
self._new_colorcode_total.get(), self._new_colorcode_puls_acceptance.get(),
self._new_colorcode_puls_sp_or_up.get(), self._new_colorcode_spacing.get()].count(True)> 0
if chk_box_active and state != None:
self.color_code_text(state)
# Drawing shortcut information if selected.
if self._new_shortcut_backdrop.get() == True:
self._main_canvas.create_text(self._main_canvas.winfo_width()*0.87, self._main_canvas.winfo_height()*0.16,
text = self._shortcut_text,
font=self._text_size["Text 8"],
fill = self._color_text)
# drawing the point dictionary
pt_size = 3
for key, value in self._point_dict.items():
if self._new_shifted_coords.get():
x_coord = round(self.get_point_actual_coord(key)[0] - self._new_shift_viz_coord_hor.get() / 1000, 3)
y_coord = round(self.get_point_actual_coord(key)[1] - self._new_shift_viz_coord_ver.get() / 1000, 3)
coord_color = 'peru'
else:
x_coord = round(self.get_point_actual_coord(key)[0], 3)
y_coord = round(self.get_point_actual_coord(key)[1], 3)
coord_color = self._color_text
if self._point_is_active and key == self._active_point :
self._main_canvas.create_oval(self.get_point_canvas_coord(key)[0] - pt_size+2,
self.get_point_canvas_coord(key)[1] - pt_size+2,
self.get_point_canvas_coord(key)[0] + pt_size+2,
self.get_point_canvas_coord(key)[1] + pt_size+2, fill='blue')
if self._new_draw_point_name.get():
# drawing the name of the point
self._main_canvas.create_text(self.get_point_canvas_coord(key)[0] + 5,
self.get_point_canvas_coord(key)[1] - 14, text='pt.'+str(get_num(key)),
font=self._text_size["Text 12 bold"], fill = 'red')
# drawing the coordinates of the point
self._main_canvas.create_text(self.get_point_canvas_coord(key)[0]+30,
self.get_point_canvas_coord(key)[1]-40,
text='(' + str(x_coord) + ' , ' +
str(y_coord) + ')',
font="Text 14", fill = 'red')
else:
self._main_canvas.create_oval(self.get_point_canvas_coord(key)[0] - pt_size,
self.get_point_canvas_coord(key)[1] - pt_size,
self.get_point_canvas_coord(key)[0] + pt_size,
self.get_point_canvas_coord(key)[1] + pt_size, fill='red')
if self._new_draw_point_name.get():
#printing 'pt.#'
self._main_canvas.create_text(self.get_point_canvas_coord(key)[0] + 15,
self.get_point_canvas_coord(key)[1] - 10, text='pt.'+str(get_num(key)),
font="Text 10", fill = self._color_text)
#printing the coordinates of the point
self._main_canvas.create_text(self.get_point_canvas_coord(key)[0]+35,
self.get_point_canvas_coord(key)[1]+10 ,
text='(' + str(x_coord) + ' , ' +
str(y_coord) + ')',
font="Text 10", fill = coord_color)
# drawing the line dictionary.
if len(self._line_dict) != 0:
for line, value in self._line_dict.items():
coord1 = self.get_point_canvas_coord('point' + str(value[0]))
coord2 = self.get_point_canvas_coord('point' + str(value[1]))
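                # The line colour summarises the governing checks: green when all applicable buckling,
                # fatigue, section, shear and thickness checks pass, red when any fail, and black when
                # no results exist for the line (or when a colour-code view supplies its own colour).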
if not chk_box_active and state != None:
try:
if self._line_to_struc[line][5] is not None: # Cylinder
cylinder_results = state['cylinder'][line]
all_cyl_chks = list()
for key, val in cylinder_results.items():
if key in ['Unstiffened shell', 'Longitudinal stiffened shell',
'Ring stiffened shell', 'Heavy ring frame']:
all_cyl_chks.append(True if val is None else val < 1)
elif key == 'Stiffener check' and val is not None:
for stf_key, stf_val in val.items():
if stf_val is not None:
all_cyl_chks.append(stf_val)
color = 'green' if all(all_cyl_chks) else 'red'
elif self._new_buckling_method.get() == 'DNV PULS':
if 'black' in state['PULS colors'][line].values():
color = 'black'
else:
col1, col2 = state['PULS colors'][line]['buckling'], \
state['PULS colors'][line]['ultimate']
if self._line_to_struc[line][0].Plate.get_puls_method() == 'buckling':
color = 'red' if any([col1 == 'red', col2 == 'red']) else 'green'
else:
color = col2
if color == 'green':
color = 'green' if all([state['colors'][line][key] == 'green' for key in
['fatigue', 'section', 'shear','thickness']]) else 'red'
elif self._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
color = 'red' if 'red' in state['colors'][line].values() else 'green'
elif self._new_buckling_method.get() == 'ML-CL (PULS based)':
if 'black' in state['ML buckling colors'][line].values():
color = 'black'
else:
col1, col2 = state['ML buckling colors'][line]['buckling'], \
state['ML buckling colors'][line]['ultimate']
if self._line_to_struc[line][0].Plate.get_puls_method() == 'buckling':
color = col1
else:
color = col2
if color == 'green':
color = 'green' if all([state['colors'][line][key] == 'green' for key in
['fatigue', 'section', 'shear','thickness']]) else 'red'
except (KeyError, TypeError):
color = 'black'
elif chk_box_active and state != None and self._line_to_struc != {}:
color = self.color_code_line(state, line, coord1, [coord2[0] - coord1[0], coord2[1] - coord1[1]])
else:
color = 'black'
vector = [coord2[0] - coord1[0], coord2[1] - coord1[1]]
# drawing a bold line if it is selected
if all([line == self._active_line, self._line_is_active]):
if line not in self._line_to_struc.keys():
self._main_canvas.create_line(coord1, coord2, width=6, fill=self._color_text)
elif self._line_to_struc[line][5] is not None:
self._main_canvas.create_line(coord1, coord2, width=10, fill = color, stipple = 'gray50')
else:
self._main_canvas.create_line(coord1, coord2, width=6, fill=color)
if self._new_line_name.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2+10,
text='Line ' + str(get_num(line)),
font=self._text_size["Text 10 bold"],
fill = 'red')
else:
if line not in self._line_to_struc.keys():
self._main_canvas.create_line(coord1, coord2, width=3, fill=self._color_text)
elif self._line_to_struc[line][5] is not None:
self._main_canvas.create_line(coord1, coord2, width=6, fill = color, stipple = 'gray50')
else:
self._main_canvas.create_line(coord1, coord2, width=3, fill = color)
if self._new_line_name.get():
self._main_canvas.create_text(coord1[0]-20 + vector[0] / 2 + 5, coord1[1] + vector[1] / 2+10,
text='l.' + str(get_num(line)), font="Text 8",
fill = self._color_text)
if line in self._multiselect_lines:
self._main_canvas.create_text(coord1[0] + vector[0] / 2 +5, coord1[1] + vector[1] / 2 -10,
text=self._new_toggle_var.get(),
fill='orange')
# drawing waterline
if len(self._load_dict) != 0:
for load, data in self._load_dict.items():
if data[0].is_static():
draft = self.get_canvas_coords_from_point_coords((0,data[0].get_static_draft()))[1]
self._main_canvas.create_line(0,draft,self._canvas_dim[0]+500,draft, fill="blue", dash=(4, 4))
self._main_canvas.create_text(900,draft-10,text=str(get_num(data[0].get_name()))+' [m]',fill ='blue')
else:
pass
def color_code_text(self, state):
'''
return_dict['color code'] = {'thickest plate': thickest_plate, 'thickness map': thk_map,
'highest pressure': highest_pressure, 'lowest pressure': lowest_pressure,
'pressure map': press_map,
'all utilizations': all_utils, 'utilization map': util_map,
'max sigma x': max(sig_x), 'min sigma x': min(sig_x), 'sigma x map': sig_x_map,
'max sigma y1': max(sig_y1), 'min sigma y1': min(sig_y1),
'sigma y1 map': sig_y1_map,
'max sigma y2': max(sig_y2), 'min sigma y2': min(sig_y2),
'sigma y2 map': sig_y2_map,
'max tau xy': max(tau_xy), 'min tau xy': min(tau_xy), 'tau_xy map': tau_xy_map,
'structure types map': set(structure_type), 'sections in model': sec_in_model,
'recorded sections': recorded_sections}
:param state:
:return:
'''
cc_state = state['color code']
if cc_state == {}:
return
start_text, start_text_shift = 190,191
cmap_sections = plt.get_cmap('jet')
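        # Legend entries below are drawn twice: first in black at a 1 px offset (x = 11) as a drop
        # shadow, then in a colour sampled from the 'jet' colormap (x = 10), one row per entry, 20 px apart.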
if self._new_colorcode_beams.get() == True and self._line_to_struc != {}:
sec_in_model = cc_state['sections in model']
for section, idx in sec_in_model.items():
if section =='length':
continue
self._main_canvas.create_text(11, start_text_shift+20*idx, text=section,
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=section,
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(cmap_sections(idx/sec_in_model['length'])),
anchor="nw")
elif self._new_colorcode_plates.get() == True and self._line_to_struc != {}:
cylinder = False
for obj_list in self._line_to_struc.values():
if obj_list[5] is not None:
cylinder = True
if cylinder:
all_thicknesses = np.unique(cc_state['all cyl thicknesses']).tolist()
else:
all_thicknesses = np.unique(cc_state['all thicknesses']).tolist()
for idx, thk in enumerate(np.unique(all_thicknesses).tolist()):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str('Plate '+
str(thk if cylinder else thk*1000) + ' mm'),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str('Plate '+ str(thk if cylinder else thk*1000) + ' mm'),
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(cmap_sections(all_thicknesses.index(thk)
/len(all_thicknesses))),
anchor="nw")
elif self._new_colorcode_spacing.get() == True and self._line_to_struc != {}:
all_spacings = cc_state['spacings']
for idx, s in enumerate(all_spacings):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str('Spacing '+ str(s*1000) + ' mm'),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str('Spacing '+ str(s*1000) + ' mm'),
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(cmap_sections(all_spacings.index(s)
/len(all_spacings))),
anchor="nw")
elif self._new_colorcode_pressure.get() == True and self._line_to_struc != {}:
highest_pressure = cc_state['highest pressure']
press_map = cc_state['pressure map']
for idx, press in enumerate(press_map):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str(str(press) + ' Pa'),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str(str(press) + ' Pa'),
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(cmap_sections(0 if highest_pressure == 0
else press/highest_pressure)),
anchor="nw")
elif all([self._new_colorcode_utilization.get() == True,
self._line_to_struc != {}, self._new_buckling_method.get() != 'DNV PULS']):
all_utils = cc_state['utilization map']
for idx, uf in enumerate(cc_state['utilization map']):
self._main_canvas.create_text(11, start_text_shift + 20 * idx, text=str('UF = ' +str(round(uf,1))),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text + 20 * idx, text=str('UF = ' +str(round(uf,1))),
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(cmap_sections(uf/max(all_utils))),
anchor="nw")
        elif all([self._new_colorcode_utilization.get() == True,
                  self._line_to_struc != {}, self._new_buckling_method.get() == 'DNV PULS']):
            all_utils = cc_state['PULS utilization map']
            for idx, uf in enumerate(all_utils):
self._main_canvas.create_text(11, start_text_shift + 20 * idx, text=str('UF = ' +str(round(uf,1))),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text + 20 * idx, text=str('UF = ' +str(round(uf,1))),
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(cmap_sections(uf/max(all_utils))),
anchor="nw")
elif self._new_colorcode_sigmax.get() == True:
for idx, value in enumerate(cc_state['sigma x map']):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black' if cc_state['max sigma x']-cc_state['min sigma x'] == 0 else
matplotlib.colors.rgb2hex(
cmap_sections(0 if cc_state['max sigma x'] == 0 else
(value+ abs(cc_state['min sigma x'])) /
(cc_state['max sigma x']-cc_state['min sigma x']))),
anchor="nw")
elif self._new_colorcode_sigmay1.get() == True:
for idx, value in enumerate(cc_state['sigma y1 map']):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black' if cc_state['max sigma y1']-cc_state['min sigma y1'] == 0
else matplotlib.colors.rgb2hex(
cmap_sections(0 if cc_state['max sigma y1'] == 0 else
(value+ abs(cc_state['min sigma y1'])) /
(cc_state['max sigma y1']-cc_state['min sigma y1']))),
anchor="nw")
elif self._new_colorcode_sigmay2.get() == True:
for idx, value in enumerate(cc_state['sigma y2 map']):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black' if cc_state['max sigma y2']-cc_state['min sigma y2'] == 0 else
matplotlib.colors.rgb2hex(
cmap_sections(0 if cc_state['max sigma y2'] == 0 else
(value+ abs(cc_state['min sigma y2'])) /
(cc_state['max sigma y2']-cc_state['min sigma y2']))),
anchor="nw")
elif self._new_colorcode_tauxy.get() == True:
for idx, value in enumerate(cc_state['tau xy map']):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str(str(round(value,5)) + ' MPa'),
font=self._text_size["Text 10 bold"],
fill='black' if cc_state['max tau xy']-cc_state['min tau xy'] == 0 else
matplotlib.colors.rgb2hex(
cmap_sections(0 if cc_state['max tau xy'] == 0 else
(value+ abs(cc_state['min tau xy'])) /
(cc_state['max tau xy']-cc_state['min tau xy']))),
anchor="nw")
elif self._new_colorcode_structure_type.get() == True:
structure_type_map = list(cc_state['structure types map'])
for idx, structure_type in enumerate(structure_type_map):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=structure_type,
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=structure_type,
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(cmap_sections(structure_type_map
.index(structure_type)/
len(structure_type_map))),
anchor="nw")
        elif self._new_colorcode_section_modulus.get() == True or self._new_colorcode_fatigue.get() == True or \
                self._new_colorcode_total.get() == True:
for idx, value in enumerate(cc_state['section modulus map']):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=str(str(round(value,5))),
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=str(str(round(value,5))),
font=self._text_size["Text 10 bold"],
fill=matplotlib.colors.rgb2hex(
cmap_sections(value)),
anchor="nw")
elif self._new_colorcode_puls_sp_or_up.get() == True:
for idx, value in enumerate(['SP', 'UP']):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=value,
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=value,
font=self._text_size["Text 10 bold"],
fill='blue' if value == 'SP' else 'red',
anchor="nw")
elif self._new_colorcode_puls_acceptance.get() == True:
for idx, value in enumerate(['buckling', 'ultimate']):
self._main_canvas.create_text(11, start_text_shift+20*idx, text=value,
font=self._text_size["Text 10 bold"],
fill='black',
anchor="nw")
self._main_canvas.create_text(10, start_text+20*idx, text=value,
font=self._text_size["Text 10 bold"],
fill='blue' if value == 'ultimate' else 'red',
anchor="nw")
def color_code_line(self, state, line, coord1, vector):
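        '''
        Returns the canvas colour for a line in the currently selected colour-code view and, when
        label colour coding is enabled, draws the corresponding value next to the line midpoint.
        '''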
cc_state = state['color code']
if line not in state['color code']['lines'].keys():
return 'black'
if self._new_colorcode_beams.get() == True and line in list(self._line_to_struc.keys()):
if self._line_to_struc[line][5] is not None or self._line_to_struc[line][0].Stiffener is None:
if self._line_to_struc[line][5] is not None:
cyl_obj = self._line_to_struc[line][5]
if cyl_obj.LongStfObj is not None:
this_text = cyl_obj.LongStfObj.get_beam_string(short = True)
color = state['color code']['lines'][line]['section cyl']
else:
this_text = 'N/A'
color = 'grey'
else:
color = 'grey'
this_text = 'N/A'
elif self._line_to_struc[line][0].Plate is not None:
color = state['color code']['lines'][line]['section']
this_text = self._line_to_struc[line][0].Plate.get_beam_string()
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text ,
font=self._text_size["Text 7"])
elif self._new_colorcode_plates.get() == True and line in list(self._line_to_struc.keys()):
if self._line_to_struc[line][5] is not None:
cyl_obj = self._line_to_struc[line][5]
color = state['color code']['lines'][line]['cylinder plate']
this_text = str(round(cyl_obj.ShellObj.thk * 1000, 2))
else:
color = state['color code']['lines'][line]['plate']
this_text = str(self._line_to_struc[line][0].Plate.get_pl_thk()*1000)
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_spacing.get() == True and line in list(self._line_to_struc.keys()):
if self._line_to_struc[line][5] is not None or self._line_to_struc[line][0].Stiffener is None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['spacing']
this_text = str(self._line_to_struc[line][0].Stiffener.get_s()*1000)
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_pressure.get() == True and line in list(self._line_to_struc.keys()):
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
if cc_state['all pressures'] == [0, 1]:
color = 'black'
else:
color = state['color code']['lines'][line]['pressure color']
this_text = str(state['color code']['lines'][line]['pressure'])
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_utilization.get() == True and self._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
if line not in list(self._line_to_struc.keys()):
color = 'black'
this_text = 'N/A'
elif self._line_to_struc[line][5] is not None:
cyl_obj = self._line_to_struc[line][5]
results = cyl_obj.get_utilization_factors()
ufs = [round(0 if results['Unstiffened shell'] is None else results['Unstiffened shell'], 2),
round(0 if results['Longitudinal stiffened shell'] is None else results['Longitudinal stiffened shell'],2),
round(0 if results['Ring stiffened shell'] is None else results['Ring stiffened shell'], 2),
round(0 if results['Heavy ring frame'] is None else results['Heavy ring frame'], 2)]
color = state['color code']['lines'][line]['cylinder uf']
this_text = str(max(ufs))
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
else:
color = state['color code']['lines'][line]['rp uf color']
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=round(state['color code']['lines'][line]['rp uf'],2))
elif self._new_colorcode_utilization.get() == True and self._new_buckling_method.get() == 'DNV PULS':
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['PULS uf color']
this_text = round(state['color code']['lines'][line]['PULS uf'],2)
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_utilization.get() == True and self._new_buckling_method.get() == 'ML-CL (PULS based)':
color = 'black'
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text='N/A')
elif self._new_colorcode_sigmax.get() == True:
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['sigma x']
this_text = str(self._line_to_struc[line][0].Plate.get_sigma_x1())
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_sigmay1.get() == True:
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['sigma y1']
                this_text = str(self._line_to_struc[line][0].Plate.get_sigma_y1())
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_sigmay2.get() == True:
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['sigma y2']
this_text = str(self._line_to_struc[line][0].Plate.get_sigma_y2())
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_tauxy.get() == True:
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['tau xy']
this_text =round(self._line_to_struc[line][0].Plate.get_tau_xy(),2)
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_structure_type.get() == True:
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['structure type']
this_text =self._line_to_struc[line][0].Plate.get_structure_type()
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text,
font=self._text_size["Text 7"])
elif self._new_colorcode_section_modulus.get() == True:
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['section modulus color']
this_text = round(state['color code']['lines'][line]['section uf'],2)
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_fatigue.get() == True:
if self._line_to_struc[line][5] is not None:
color = 'grey'
this_text = 'N/A'
else:
color = state['color code']['lines'][line]['fatigue color']
this_text = round(state['color code']['lines'][line]['fatigue uf'],2)
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_colorcode_total.get() == True:
if self._line_to_struc[line][5] is not None:
cyl_obj = self._line_to_struc[line][5]
results = cyl_obj.get_utilization_factors()
ufs = [round(0 if results['Unstiffened shell'] is None else results['Unstiffened shell'], 2),
round(0 if results['Longitudinal stiffened shell'] is None else results['Longitudinal stiffened shell'],2),
round(0 if results['Ring stiffened shell'] is None else results['Ring stiffened shell'], 2),
round(0 if results['Heavy ring frame'] is None else results['Heavy ring frame'], 2)]
color = state['color code']['lines'][line]['cylinder uf']
this_text = str(max(ufs))
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=this_text)
elif self._new_buckling_method.get() == 'DNV PULS':
color = state['color code']['lines'][line]['Total uf color puls']
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=round(state['color code']['lines'][line]['Total uf puls'],2))
elif self._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
color = state['color code']['lines'][line]['Total uf color rp']
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=round(state['color code']['lines'][line]['Total uf rp'],2))
elif self._new_buckling_method.get() == 'ML-CL (PULS based)':
color = 'black'
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text='N/A')
elif self._new_colorcode_puls_acceptance.get():
if state['color code']['lines'][line]['PULS method'] == None:
color = 'black'
else:
color = 'blue' if state['color code']['lines'][line]['PULS method'] == 'ultimate' else 'red'
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=state['color code']['lines'][line]['PULS method'])
elif self._new_colorcode_puls_sp_or_up.get():
if state['color code']['lines'][line]['PULS sp or up'] == None:
color = 'black'
else:
color = 'blue' if state['color code']['lines'][line]['PULS sp or up'] == 'SP' else 'red'
if self._new_label_color_coding.get():
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 - 10,
text=state['color code']['lines'][line]['PULS sp or up'])
else:
color = 'black'
return color
def draw_prop(self, event = None):
'''
Prints the properties of the selected line to the bottom canvas.
        properties for line dictionary:
name of line : [ Structure class, calc scantling class, calc fatigue class, [load classes] ]
'''
self._prop_canvas.delete('all')
canvas_width = self._prop_canvas.winfo_width()
canvas_height = self._prop_canvas.winfo_height()
def checkered(line_distance, canvas):
'''
Grid lines in the properties canvas.
:param line_distance:
:return:
'''
# vertical lines at an interval of "line_distance" pixel
            for x in range(line_distance, canvas_width, line_distance):
                canvas.create_line(x, 0, x, canvas_height, stipple='gray50', activestipple='gray75')
# horizontal lines at an interval of "line_distance" pixel
for y in range(line_distance, canvas_height, line_distance):
canvas.create_line(0, y, canvas_width, y, stipple='gray50', activestipple='gray75')
if self._active_line in self._line_to_struc:
self.set_selected_variables(self._active_line)
# printing the properties to the active line
if self._line_is_active and self._line_to_struc[self._active_line][5] is None:
#checkered(10, self._prop_canvas)
self._prop_canvas.create_text([canvas_width/2-canvas_width/20, canvas_height/20],
text ='SELECTED: '+str(self._active_line),
font=self._text_size["Text 10 bold"], fill='red')
if all([self._line_to_struc[self._active_line][0].Stiffener is None,
self._line_to_struc[self._active_line][0].Girder is None]):
structure_obj = self._line_to_struc[self._active_line][0].Plate
spacing = structure_obj.get_s() * self._prop_canvas_scale * 3
plate_thk = structure_obj.get_pl_thk() * self._prop_canvas_scale * 3
startx = 20
starty = 225
self._prop_canvas.create_text([startx + 100, 50],
text='Plate with thickness ' +
str(structure_obj.get_pl_thk()*1000) + ' mm' ,
font=self._text_size["Text 10 bold"], fill='Black')
self._prop_canvas.create_rectangle(startx + spacing,
starty,
startx + spacing + spacing,
starty - plate_thk,
fill='grey', activefill='yellow')
for idx, structure_obj in enumerate([self._line_to_struc[self._active_line][0].Stiffener,
self._line_to_struc[self._active_line][0].Girder]):
mult = 1 if self._line_to_struc[self._active_line][0].Girder is not None else 2 # *(400/max_web)
thk_mult = 2 # *(400/max_web)
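                # Cross sections are drawn to the property-canvas scale; thicknesses are exaggerated by
                # thk_mult (x2) for visibility, and everything is drawn at half size (mult = 1) when a
                # girder is shown next to the stiffener.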
startx = 100 + 300 * idx
starty = 225
if structure_obj is not None:
self._prop_canvas.create_text([startx +60, 50],
text='Stiffener\n' +structure_obj.get_beam_string()
if idx == 0 else 'Girder\n' + structure_obj.get_beam_string(),
font=self._text_size["Text 9 bold"], fill='Black')
if structure_obj is not None:
self._prop_canvas.create_text([100, 20],
text='Thickness scale x 2',
font=self._text_size["Text 10 bold"], fill='grey')
# drawing stiffener
spacing = structure_obj.get_s()*self._prop_canvas_scale * mult
stf_web_height = structure_obj.get_web_h()*self._prop_canvas_scale * mult
stf_flange_width = structure_obj.get_fl_w() *self._prop_canvas_scale * mult
plate_thk = structure_obj.get_pl_thk()*self._prop_canvas_scale*thk_mult * mult
stf_web_thk = structure_obj.get_web_thk()*self._prop_canvas_scale*thk_mult * mult
stf_flange_thk = structure_obj.get_fl_thk()*self._prop_canvas_scale*thk_mult * mult
for count in [0,1,2] if idx == 0 else [0,]:
self._prop_canvas.create_rectangle(startx + count*spacing,
starty,
startx+spacing+ count*spacing,
starty- plate_thk ,
fill = 'grey', activefill = 'yellow')
self._prop_canvas.create_rectangle(startx+spacing*0.5+ count*spacing - stf_web_thk/2,
starty - plate_thk,
startx+spacing*0.5+ count*spacing + stf_web_thk/2,
starty - stf_web_height - plate_thk,
fill = 'grey', activefill = 'yellow')
if structure_obj.get_stiffener_type() not in ['L', 'L-bulb']:
self._prop_canvas.create_rectangle(startx+spacing*0.5-stf_flange_width/2+ count*spacing,
starty - stf_web_height - plate_thk,
startx + spacing * 0.5 + stf_flange_width / 2+ count*spacing,
starty - stf_web_height - plate_thk- stf_flange_thk,
fill = 'grey', activefill = 'yellow')
else:
self._prop_canvas.create_rectangle(startx+spacing*0.5-stf_web_thk/2+ count*spacing,
starty-stf_web_height - plate_thk,
startx + spacing * 0.5 + stf_flange_width + count*spacing,
starty - stf_web_height - plate_thk - stf_flange_thk,
fill = 'grey',
activefill = 'yellow')
elif self._line_is_active and self._line_to_struc[self._active_line][5] is not None:
self.draw_cylinder(canvas = self._prop_canvas,CylObj = self._line_to_struc[self._active_line][5],
height = 200, radius = 150, start_x_cyl = 500,start_y_cyl = 20,
text_color= self._color_text)
else:
pass
@staticmethod
def draw_cylinder(text_size = None, canvas = None, CylObj: CylinderAndCurvedPlate = None,
height = 150, radius = 150,
start_x_cyl = 500,start_y_cyl = 20, acceptance_color = False, text_x = 200, text_y = 130,
text_color = 'black'):
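        '''
        Draws a simple 2D sketch of a cylinder on the given canvas: the shell outline, longitudinal
        stiffeners as blue lines, ring stiffeners as orange arcs and ring frames/girders as grey arcs.
        '''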
canvas_width = canvas.winfo_width()
canvas_height = canvas.winfo_height()
if text_size == None:
text_size = 'Verdana 8'
canvas.create_text([text_x, text_y], text=CylObj, font=text_size,fill = text_color)
# setting the input field to active line properties
#self.set_selected_variables(self._active_line)
offset_oval = 30
coord1 = start_x_cyl, start_y_cyl, start_x_cyl + radius, start_y_cyl+offset_oval
coord2 = start_x_cyl, start_y_cyl + height, start_x_cyl + radius,start_y_cyl+ offset_oval + height
arc_1 = canvas.create_oval(coord1, width=5, fill='grey90')
arc_2 = canvas.create_arc(coord2, extent=180, start=180, style=tk.ARC, width=3)
line1 = canvas.create_line(coord1[0], coord1[1] + offset_oval / 2,
coord1[0], coord1[1] + height + offset_oval / 2,
width=3)
line2 = canvas.create_line(coord1[0] + radius, coord1[1] + offset_oval / 2,
coord1[0] + radius, coord1[1] + height + offset_oval / 2,
width=3)
if CylObj.LongStfObj is not None:
long_obj = CylObj.LongStfObj
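            # Number of longitudinal stiffeners on the visible (front) half of the shell:
            # circumference in mm (2*pi*radius*1000) divided by the spacing s (assumed to be in mm), halved.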
num_stf = int(1000 * 2*math.pi*CylObj.ShellObj.radius / long_obj.s / 2)
for line_num in range(1, num_stf, 1):
angle = 180 - 180 / (num_stf) * line_num
arc_x, arc_y = 1 * math.cos(math.radians(angle)), 0.5 * math.sin(math.radians(angle))
arc_x = (arc_x + 1) / 2
line1 = canvas.create_line(coord1[0] + radius * arc_x,
coord1[1] + 1 * arc_y * offset_oval+offset_oval/2,
coord1[0] + radius * arc_x,
coord1[1] + height + 1 * arc_y * offset_oval+offset_oval/2,
fill='blue')
if CylObj.RingStfObj is not None:
num_ring_stiff = CylObj.ShellObj.length_of_shell / \
CylObj.ShellObj._dist_between_rings
num_ring_stiff = int(num_ring_stiff)
for ring_stf in range(1, num_ring_stiff + 1, 1):
coord3 = coord1[0], coord1[1] + (height / (num_ring_stiff + 1)) * ring_stf, \
start_x_cyl + radius, coord1[3] + (height / (num_ring_stiff + 1)) * ring_stf,
arc_2 = canvas.create_arc(coord3, extent=180, start=180, style=tk.ARC, width=2,
fill='orange',
outline='orange')
if CylObj.RingFrameObj is not None:
num_ring_girder = CylObj.ShellObj.length_of_shell / \
CylObj.length_between_girders
num_ring_girder = int(num_ring_girder)
for ring_girder in range(1, num_ring_girder + 1, 1):
coord3 = coord1[0], coord1[1] + (height / (num_ring_girder + 1)) * ring_girder, \
start_x_cyl + radius, coord1[3] + (height / (num_ring_girder + 1)) * ring_girder,
arc_2 = canvas.create_arc(coord3, extent=180, start=180, style=tk.ARC, width=4,
fill='grey', outline='grey')
def draw_results(self, state = None):
'''
        Results for the active line are drawn on the results canvas here.
state = {'colors': {}, 'section_modulus': {}, 'thickness': {}, 'shear_area': {}, 'buckling': {},
'fatigue': {}, 'pressure_uls': {}, 'pressure_fls': {},
'all_obj': {}, 'scant_calc_obj': {}, 'fatigue_obj': {}}
:return:
'''
self._result_canvas.delete('all')
if state is None or self._active_line not in state['all_obj'].keys():
return
if self._line_is_active:
x, y, dx, dy = 0, 5, 15, 17
if self._active_line in self._line_to_struc and self._line_to_struc[self._active_line][5] is None:
m3_to_mm3 = float(math.pow(1000,3))
m2_to_mm2 = float(math.pow(1000, 2))
current_line = self._active_line
obj_scnt_calc_pl = state['all_obj'][current_line].Plate
obj_scnt_calc_stf = state['all_obj'][current_line].Stiffener
obj_scnt_calc_girder = state['all_obj'][current_line].Girder
sec_mod = [round(state['section_modulus'][current_line]['sec_mod'][0], 5),
round(state['section_modulus'][current_line]['sec_mod'][1], 5)]
shear_area = state['shear_area'][current_line]['shear_area']
min_shear = state['shear_area'][current_line]['min_shear_area']
min_sec_mod = state['section_modulus'][current_line]['min_sec_mod']
min_thk = state['thickness'][current_line]['min_thk']
buckling = state['buckling'][current_line]
if state['slamming'][current_line]['state']:
slamming = True
slm_zpl = state['slamming'][current_line]['zpl']
slm_zpl_req = state['slamming'][current_line]['zpl_req']
slm_min_pl_thk = state['slamming'][current_line]['min_plate_thk']
slm_min_web_thk = state['slamming'][current_line]['min_web_thk']
slm_text_pl_thk = 'Minimum plate thickness (BOW SLAMMING): '+str(round(slm_min_pl_thk,1))+' [mm]' \
if obj_scnt_calc_stf.get_pl_thk() * 1000 < slm_min_pl_thk else None
slm_text_min_web_thk = 'Minimum web thickness (BOW SLAMMING): '+str(round(slm_min_web_thk,1))+' [mm]' \
if obj_scnt_calc_stf.get_web_thk()*1000 < slm_min_web_thk else None
if slm_zpl_req is not None:
slm_text_min_zpl = 'Minimum section modulus (BOW SLAMMING): '+str(round(slm_zpl_req,1))+' [cm^3]' \
if slm_zpl < slm_zpl_req else None
else:
slm_text_min_zpl = False
else:
slamming, slm_text_pl_thk, slm_text_min_web_thk, slm_text_min_zpl = [False for di in range(4)]
color_fatigue = state['colors'][current_line]['fatigue']
color_sec = state['colors'][current_line]['section']
color_shear = state['colors'][current_line]['shear']
color_thk = state['colors'][current_line]['thickness']
color_buckling = state['colors'][current_line]['buckling']
#printing the minimum section modulus
x1, x2, x3 = 15,25,35
self._result_canvas.create_text([x+0*dx, (y+0*dy)*1],
text= 'Special provisions - DNV-OS-C101 - checks for section, '
'web thickness and plate thickness.',
font=self._text_size["Text 9 bold"],anchor='nw', fill=self._color_text)
self._result_canvas.create_text([x+0*dx, (y+2*dy)*1],
text= 'Section modulus check',
font=self._text_size["Text 9"],anchor='nw', fill=self._color_text)
self._result_canvas.create_text([x+0*dx, (y+3*dy)*1],
text= 'Shear area check',
font=self._text_size["Text 9"],anchor='nw', fill=self._color_text)
self._result_canvas.create_text([x+0*dx, (y+4*dy)*1],
text= 'Plate thickness check',
font=self._text_size["Text 9"],anchor='nw', fill=self._color_text)
self._result_canvas.create_text([x + x1*dx, (y+1*dy)*1],
text= 'Minimum value',
font=self._text_size["Text 9"],anchor='nw', fill=self._color_text)
self._result_canvas.create_text([x+ x2*dx, (y+1*dy)*1],
text= 'Actual value',
font=self._text_size["Text 9"],anchor='nw', fill=self._color_text)
self._result_canvas.create_text([x+ x3*dx, (y+1*dy)*1],
text= 'Accepted?',
font=self._text_size["Text 9"],anchor='nw', fill=self._color_text)
if state['slamming'][current_line]['state'] and slm_text_min_zpl is False:
text = '(shear issue, change thickness or web height)'
else:
text = str('%.4E' % decimal.Decimal(min_sec_mod * m3_to_mm3)) +\
' [mm^3] ' if not slm_text_min_zpl else slm_text_min_zpl
self._result_canvas.create_text([x + x1*dx, (y+2*dy)*1], text= text,
font=self._text_size["Text 9 bold"],anchor='nw', fill=self._color_text)
                # printing the calculated section modulus
if state['slamming'][current_line]['state'] and slm_text_min_zpl is False:
text = ''
else:
text = str('%.4E' % decimal.Decimal(min(sec_mod[1], sec_mod[0])*m3_to_mm3))+ ' [mm^3]' \
if not slm_text_min_zpl else str(slm_zpl)+'- zpl [cm^3]'
self._result_canvas.create_text([x + x2*dx, (y+2*dy)*1],
text=text,font=self._text_size['Text 9 bold'],anchor='nw',
fill = color_sec)
if not state['slamming'][current_line]['state']:
self._result_canvas.create_text([x + x3*dx, (y+2*dy)*1],
text='Ok' if min(sec_mod[1], sec_mod[0])*m3_to_mm3 >=
min_sec_mod * m3_to_mm3 else 'Not ok',
font=self._text_size['Text 9 bold'],anchor='nw',
fill=color_sec)
#minimum shear area
text = str('%.4E' % decimal.Decimal(min_shear * m2_to_mm2))+' [mm^2] ' \
if not slm_text_min_web_thk else str(round(slm_min_web_thk,1))+' [mm]'
self._result_canvas.create_text([x + x1*dx, (y+3*dy)*1],
text = text,
font=self._text_size["Text 9 bold"],anchor='nw',fill=self._color_text)
text = str('%.4E' % decimal.Decimal(shear_area * m2_to_mm2 ))+' [mm^2]' \
if not slm_text_min_web_thk else str(obj_scnt_calc_stf.get_web_thk()*1000)+' [mm]'
self._result_canvas.create_text([x + x2*dx, (y+3*dy)*1],
text= text,
font=self._text_size["Text 9 bold"],anchor='nw', fill=color_shear)
if not state['slamming'][current_line]['state']:
self._result_canvas.create_text([x + x3*dx, (y+3*dy)*1],
text= 'Ok' if shear_area * m2_to_mm2 >= min_shear * m2_to_mm2 else
'Not ok',
font=self._text_size["Text 9 bold"],anchor='nw', fill=color_shear)
#minimum thickness for plate
text = str(round(min_thk,1)) + ' [mm]' if not state['slamming'][current_line]['state'] else \
'Slamming minimum thickness: '+str(round(slm_min_pl_thk,2))+' [mm]'
self._result_canvas.create_text([x + x1*dx, (y+4*dy)*1],
text=text,
font=self._text_size["Text 9 bold"],anchor='nw', fill=self._color_text)
if not state['slamming'][current_line]['state']:
self._result_canvas.create_text([x + x2*dx, (y+4*dy)*1],
text=str(obj_scnt_calc_pl.get_pl_thk()*1000)+' [mm] ',
font=self._text_size["Text 9 bold"],anchor='nw', fill=color_shear)
self._result_canvas.create_text([x + x3*dx, (y+4*dy)*1],
text='Ok' if obj_scnt_calc_pl.get_pl_thk()*1000 > min_thk
else 'Not ok',
font=self._text_size["Text 9 bold"],anchor='nw', fill=color_shear)
# buckling results
start_y, y = 5, 10
if self._PULS_results != None and self._new_buckling_method.get() == 'DNV PULS':
line_results = state['PULS colors'][self._active_line]
puls_res = self._PULS_results.get_puls_line_results(self._active_line)
if puls_res != None:
geo_problem = False
if type(puls_res['Ultimate capacity']['Actual usage Factor'][0]) != str:
ult_text = 'Ultimate capacity usage factor: ' + str(puls_res['Ultimate capacity']
['Actual usage Factor'][
0] / self._new_puls_uf.get())
else:
geo_problem = True
ult_text = puls_res['Ultimate capacity']['Actual usage Factor'][0]
if puls_res['Buckling strength']['Actual usage Factor'][0] != None:
buc_text = 'Buckling capacity usage factor: ' + str(puls_res['Buckling strength']
['Actual usage Factor'][
0] / self._new_puls_uf.get())
else:
buc_text = 'Buckling capacity usage factor: None - geometric issue'
loc_label = 'Local geom req (PULS validity limits)' if \
obj_scnt_calc_pl.get_puls_sp_or_up() == 'SP' else 'Geom. Req (PULS validity limits)'
csr_label = 'CSR-Tank requirements (primary stiffeners)' if \
obj_scnt_calc_pl.get_puls_sp_or_up() == 'SP' else 'CSR-Tank req'
if geo_problem:
loc_geom = 'Not ok: '
for key, value in puls_res[loc_label].items():
if value[0] == 'Not ok':
loc_geom += key + ' '
else:
loc_geom = 'Ok' if all(
[val[0] == 'Ok' for val in puls_res[loc_label]
.values()]) else 'Not ok'
csr_geom = 'Ok' if all(
[val[0] in ['Ok', '-'] for val in puls_res[csr_label]
.values()]) else 'Not ok'
loc_geom = loc_label + ': ' + loc_geom
csr_geom = csr_label+': ' + csr_geom
self._result_canvas.create_text([x * 1, y + (start_y+0) * dy], text='PULS results',
font=self._text_size['Text 9 bold'],
anchor='nw',
fill = self._color_text)
self._result_canvas.create_text([x * 1, y + (start_y+1) * dy], text=buc_text,
font=self._text_size['Text 9 bold'],
anchor='nw',
fill=line_results['buckling'])
self._result_canvas.create_text([x * 1, y + (start_y+2) * dy], text=ult_text,
font=self._text_size['Text 9 bold'],
anchor='nw',
fill=line_results['ultimate'])
self._result_canvas.create_text([x * 1, y + (start_y+3) * dy], text=loc_geom,
font=self._text_size['Text 9 bold'],
anchor='nw',
fill=line_results['local geometry'])
self._result_canvas.create_text([x * 1, y + (start_y+4) * dy], text=csr_geom,
font=self._text_size['Text 9 bold'],
anchor='nw',
fill=line_results['csr'])
else:
self._result_canvas.create_text([x * 1, y + (start_y+0) * dy],
                                                     text='PULS results not available for this line.\n'
'Run or update lines.',
font=self._text_size['Text 9 bold'],
anchor='nw',
fill='Orange')
elif self._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
'''
return {'Plate': {'Plate buckling': up_buckling}, 'Stiffener': {'Overpressure plate side': stf_buckling_pl_side,
'Overpressure stiffener side': stf_buckling_stf_side,
'Resistance between stiffeners': stf_plate_resistance,
'Shear capacity': stf_shear_capacity},
'Girder': {'Overpressure plate side': girder_buckling_pl_side,
'Overpressure girder side': girder_buckling_girder_side,
'Shear capacity': girder_shear_capacity},
'Local buckling': local_buckling}
'''
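                    # The prescriptive results are laid out as a table: one row per check and three value
                    # columns (Plate, Stiffener, Girder) at the x-offsets x1, x2 and x3 defined below.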
self._result_canvas.create_text([x * 1, (y+(start_y+0)*dy) * 1],
text='Buckling results DNV-RP-C201 - prescriptive - (plate, stiffener, girder):',
font=self._text_size["Text 9 bold"], anchor='nw',
fill = self._color_text)
self._result_canvas.create_text([x + dx*0, (y+(start_y+2)*dy) * 1],
text='Overpressure plate side',font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text)
self._result_canvas.create_text([x + dx*0, (y+(start_y+3)*dy) * 1],
text='Overpressure stiffener side',font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text)
self._result_canvas.create_text([x + dx*0, (y+(start_y+4)*dy) * 1],
text='Resistance between stiffeners',font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text)
self._result_canvas.create_text([x + dx*0, (y+(start_y+5)*dy) * 1],
text='Shear capacity',font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text)
self._result_canvas.create_text([x + dx*0, (y+(start_y+6)*dy) * 1],
text='Maximum web height [mm]',
font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text)
self._result_canvas.create_text([x + dx*0, (y+(start_y+7)*dy) * 1],
text='Maximum flange width [mm]',
font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text)
#'Local buckling'
x1, x2, x3 = 15,25,35
self._result_canvas.create_text([x + dx*15, (y+(start_y+1)*dy) * 1],
text='Plate',font=self._text_size["Text 9 bold"],
anchor='nw',fill=self._color_text)
self._result_canvas.create_text([x + dx*25, (y+(start_y+1)*dy) * 1],
text='Stiffener',font=self._text_size["Text 9 bold"],
anchor='nw',fill=self._color_text)
self._result_canvas.create_text([x + dx*35, (y+(start_y+1)*dy) * 1],
text='Girder',font=self._text_size["Text 9 bold"],
anchor='nw',fill=self._color_text)
x_mult = x1
self._result_canvas.create_text([x + dx*x_mult , (y+(start_y+2)*dy) * 1],
text=str(round(buckling['Plate']['Plate buckling'],3)),
font=self._text_size["Text 9 bold"],
anchor='nw',fill=color_buckling)
x_mult = x2
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+2)*dy) * 1],
text=str(round(buckling['Stiffener']['Overpressure plate side'],3)),
font=self._text_size["Text 9 bold"],
anchor='nw',fill=color_buckling)
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+3)*dy) * 1],
text=str(round(buckling['Stiffener']['Overpressure stiffener side'],3)),
font=self._text_size["Text 9 bold"],
anchor='nw',fill=color_buckling)
stfweb = round(buckling['Local buckling']['Stiffener'][0],3)*1000
stffl = round(buckling['Local buckling']['Stiffener'][1],3)*1000
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+4)*dy) * 1],
text=str(round(buckling['Stiffener']['Resistance between stiffeners'],3))
,font=self._text_size["Text 9 bold"],
anchor='nw',
fill= color_buckling)
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+5)*dy) * 1],
text=str(round(buckling['Stiffener']['Shear capacity'],3)),
font=self._text_size["Text 9 bold"],
anchor='nw',
fill=color_buckling)
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+6)*dy) * 1],
text=str(stfweb),
font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text if obj_scnt_calc_stf is None else 'red'
if obj_scnt_calc_stf.hw > stfweb else 'green')
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+7)*dy) * 1],
text=str(stffl),
font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text if obj_scnt_calc_stf is None else 'red'
if obj_scnt_calc_stf.b > stffl else 'green')
x_mult = x3
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+2)*dy) * 1],
text=str(round(buckling['Girder']['Overpressure plate side'],3)),
font=self._text_size["Text 9 bold"],
anchor='nw',fill=color_buckling)
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+3)*dy) * 1],
text=str(round(buckling['Girder']['Overpressure girder side'],3)),
font=self._text_size["Text 9 bold"],
anchor='nw',fill=color_buckling)
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+5)*dy) * 1],
text=str(round(buckling['Girder']['Shear capacity'],3)),
font=self._text_size["Text 9 bold"],
anchor='nw',fill=color_buckling)
gweb = round(buckling['Local buckling']['Girder'][0],3)*1000
gfl = round(buckling['Local buckling']['Girder'][1],3)*1000
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+6)*dy) * 1],
text=str(gweb),
font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text if obj_scnt_calc_girder is None else
'red' if obj_scnt_calc_girder.hw > gweb else 'green')
self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+7)*dy) * 1],
text=str(gfl),
font=self._text_size["Text 9"],
anchor='nw',fill=self._color_text if obj_scnt_calc_girder is None else
'red' if obj_scnt_calc_girder.b > gfl else 'green')
#
# self._result_canvas.create_text([x + dx*x_mult, (y+(start_y+7)*dy) * 1],
# text=str(round(buckling['Local buckling']['Girder'][1],3)),
# font=self._text_size["Text 9"],
# anchor='nw',fill=color_buckling)
elif self._new_buckling_method.get() == 'ML-CL (PULS based)':
self._result_canvas.create_text([x * 1, (y+(start_y+0)*dy) * 1],
text='Buckling results ANYstructure ML algorithm:',
font=self._text_size["Text 9 bold"], anchor='nw',
fill = self._color_text)
self._result_canvas.create_text([x * 1, (y+(start_y+1)*dy) * 1],
text='Buckling: ' + self._ML_classes[state['ML buckling class'][current_line]['buckling']],
font=self._text_size["Text 9 bold"],
anchor='nw', fill=state['ML buckling colors'][current_line]['buckling'])
self._result_canvas.create_text([x * 1, (y+(start_y+2)*dy) * 1],
text='Ultimate: ' +self._ML_classes[state['ML buckling class'][current_line]['ultimate']],
font=self._text_size["Text 9 bold"],
anchor='nw', fill=state['ML buckling colors'][current_line]['ultimate'])
if obj_scnt_calc_pl.get_puls_sp_or_up() == 'SP':
csr = state['ML buckling class'][current_line]['CSR']
csr_str = ['Ok' if csr[0] == 1 else 'Not ok', 'Ok' if csr[1] == 1 else 'Not ok',
'Ok' if csr[2] == 1 else 'Not ok', 'Ok' if csr[3] == 1 else 'Not ok']
self._result_canvas.create_text([x * 1, (y+(start_y+3)*dy) * 1],
text='CSR requirements (stiffener): plate-'+ csr_str[0]+ ' web-'+
csr_str[1] + ' web/flange ratio-'+ csr_str[2] +
' flange-'+ csr_str[3] ,
font=self._text_size["Text 9"],
anchor='nw',
fill=state['ML buckling colors'][current_line]['CSR requirement'])
else:
csr = state['ML buckling class'][current_line]['CSR']
csr_str = 'Ok' if csr[0] == 1 else 'Not ok'
self._result_canvas.create_text([x * 1, (y+(start_y+3)*dy) * 1],
text='CSR requirements (stiffener): Plate slenderness -'+
csr_str,
font=self._text_size["Text 9"],
anchor='nw',
fill=state['ML buckling colors'][current_line]['CSR requirement'])
# fatigue results
self._result_canvas.create_text([x * 1, (y+(start_y+8)*dy) * 1],
text='Fatigue results (DNVGL-RP-C203): ',
font=self._text_size["Text 9 bold"], anchor='nw', fill = self._color_text)
if self._line_to_struc[current_line][2] != None:
if state['fatigue'][current_line]['damage'] is not None:
damage = state['fatigue'][current_line]['damage']
dff = state['fatigue'][current_line]['dff']
self._result_canvas.create_text([x * 1, (y + (start_y+9) * dy) * 1],
text='Total damage (DFF not included): '+str(round(damage,3)) +
' | With DFF = '+str(dff)+' --> Damage: '+
str(round(damage*dff,3)),
font=self._text_size["Text 9 bold"], anchor='nw',
fill=color_fatigue)
else:
self._result_canvas.create_text([x * 1, (y + (start_y+9) * dy) * 1],
text='Total damage: NO RESULTS ',
font=self._text_size["Text 9 bold"],
anchor='nw', fill = self._color_text)
else:
self._result_canvas.create_text([x * 1, (y + (start_y+9) * dy) * 1],
text='Total damage: NO RESULTS ',
font=self._text_size["Text 9 bold"],
anchor='nw', fill = self._color_text)
elif self._active_line in self._line_to_struc and self._line_to_struc[self._active_line][5] is not None:
'''
Cylinder calculations
'cylinder' = {'Unstiffened shell': uf_unstf_shell,
'Longitudinal stiffened shell': uf_long_stf,
'Ring stiffened shell': uf_ring_stf,
'Heavy ring frame': uf_ring_frame,
'Column stability check': column_stability,
'Stiffener check': stiffener_check}
'''
cyl_obj = self._line_to_struc[self._active_line][5]
text = 'Results for cylinders and curved plates/panels:'
self._result_canvas.create_text([x * 1, y * 1],
text=text, font=self._text_size['Text 12 bold'], anchor='nw',
fill = self._color_text)
y_location = 3
results = cyl_obj.get_utilization_factors()
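                # One row is written per utilization factor; the 'Stiffener check' entry expands into
                # per-stiffener columns with an OK/failed/N/A status and a detailed message below it.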
for key, value in results.items():
if key in ['Weight', 'Need to check column buckling']:
continue
if all([key != 'Stiffener check', key != 'Stiffener check detailed']):
text_key = key
if key == 'Column stability check':
if 'Need to check column buckling' in results.keys():
if results['Need to check column buckling'] == False:
continue
text_value = 'N/A' if value is None else 'OK' if value else 'Not ok'
else:
text_value = 'N/A' if value is None else str(round(value, 2))
if value is None:
uf_col = 'grey'
else:
uf_col = 'red' if any([value > 1, value == False]) else 'green'
self._result_canvas.create_text([x*1, y+dy*y_location],
text=text_key,font=self._text_size['Text 10 bold'],anchor='nw',
fill = self._color_text)
self._result_canvas.create_text([dx*20, dy*y_location],
text=text_value,font=self._text_size['Text 10 bold'],anchor='nw',
fill=uf_col)
elif key == 'Stiffener check':
if value is not None:
y_location +=1
self._result_canvas.create_text([x, dy*y_location],
text='Stiffener requirement checks:',
font=self._text_size['Text 10 bold'],
anchor='nw',
fill = self._color_text)
y_location += 1
idx_y, idx_x = 0, 0
for stf_type, chk_bool in value.items():
stf_text = stf_type
if stf_type == 'ring frame':
continue
chk_text = 'OK' if chk_bool == True else 'failed' if chk_bool == False else 'N/A'
self._result_canvas.create_text([15*dx*idx_x, dy*y_location],
text=stf_text, font=self._text_size['Text 10 bold'],
anchor='nw',
fill=self._color_text if not value else 'black')
self._result_canvas.create_text([15*dx*idx_x, y + (y_location+1)*dy],
text=chk_text, font=self._text_size['Text 10 bold'],
anchor='nw',
fill='green' if chk_bool == True else 'red' if
chk_bool == False else self._color_text)
self._result_canvas.create_text([15*dx*idx_x, y + (y_location+2)*dy],
text=results['Stiffener check detailed'][stf_type],
font=self._text_size['Text 10'],
anchor='nw',
fill='green' if chk_bool == True else 'red' if
chk_bool == False else self._color_text)
idx_y += 1
idx_x += 1
y_location += 1
def report_generate(self, autosave = False):
'''
Button is pressed to generate a report of the current structure.
:return:
'''
if not autosave:
            save_file = filedialog.asksaveasfile(mode="w", defaultextension=".pdf")
            if save_file is None:  # asksaveasfile returns `None` if the dialog is closed with "cancel".
                return
            filename = save_file.name
else:
filename = '../testrun.pdf'
if self._line_dict == {}:
tk.messagebox.showerror('No lines', 'No lines defined. Cannot make report.')
return
if os.path.isfile('../current_comps.png'):
os.remove('../current_comps.png')
self.grid_display_tanks(save = True)
else:
self.grid_display_tanks(save=True)
doc = LetterMaker(filename, "Section results", 10, self)
doc.createDocument()
doc.savePDF()
try:
os.startfile(filename)
except FileNotFoundError:
pass
self._new_colorcode_beams.set(False)
self._new_colorcode_plates.set(False)
self._new_colorcode_pressure.set(False)
self.update_frame()
def table_generate(self, autosave = False):
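        '''
        Generates a PDF table of the current structure (landscape letter layout) and tries to open it.
        :return:
        '''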
if not autosave:
            save_file = filedialog.asksaveasfile(mode="w", defaultextension=".pdf")
            if save_file is None:  # asksaveasfile returns `None` if the dialog is closed with "cancel".
                return
            filename = save_file.name
else:
filename = '../testrun.pdf'
if self._line_dict == {}:
tk.messagebox.showerror('No lines', 'No lines defined. Cannot make report.')
return
doc_dat = LetterMaker(filename, "Section results", 10, self)
doc = SimpleDocTemplate(filename, pagesize=landscape(letter))
elements = doc_dat.createTable()
doc.build(elements)
try:
os.startfile(filename)
except FileNotFoundError:
pass
self._new_colorcode_beams.set(False)
self._new_colorcode_plates.set(False)
self._new_colorcode_pressure.set(False)
self.update_frame()
def create_accelerations(self):
'''
Set the selected accelerations.
:return:
'''
try:
self._accelerations_dict['static'] = self._new_static_acc.get()
self._accelerations_dict['dyn_loaded'] = self._new_dyn_acc_loaded.get()
self._accelerations_dict['dyn_ballast'] = self._new_dyn_acc_ballast.get()
if len(self._tank_dict) != 0:
for tank, data in self._tank_dict.items():
data.set_acceleration(self._accelerations_dict)
for line, obj in self._line_to_struc.items():
obj[0].need_recalc = True
except TclError:
messagebox.showinfo(title='Input error', message='Input must be a number. Dots used not comma.')
def new_point(self,copy=False,move=False, redo = None):
'''
        Adds a point number and coordinates to the point dictionary. Stored as 'point1' = [x_coord, y_coord] in metres.
'''
try:
if copy:
x_coord = self._new_point_x.get()/1000 + self._point_dict[self._active_point][0]
y_coord = self._new_point_y.get()/1000 + self._point_dict[self._active_point][1]
elif move:
x_coord = self._new_point_x.get()/1000 + self._point_dict[self._active_point][0] \
if redo is None else redo[0]
y_coord = self._new_point_y.get()/1000 + self._point_dict[self._active_point][1]\
if redo is None else redo[1]
else:
x_coord = (self._new_point_x.get() / 1000)
y_coord = (self._new_point_y.get() / 1000)
# Finding name of the new point
current_point = ''
if move:
current_point, current_coords = self._active_point, self._point_dict[self._active_point]
else:
found_name = False
if len(self._point_dict) == 0:
current_point = 'point1'
found_name = True
else:
counter = 1
while not found_name:
current_point = 'point'+str(counter)
if current_point not in self._point_dict.keys():
found_name = True
else:
counter += 1
self._new_line_p1.set(get_num(current_point))
# Creating the point
# No point is created if another point is already there
if [x_coord,y_coord] not in self._point_dict.values():
self._point_dict[current_point] = [x_coord, y_coord]
self._active_point = current_point
if move:
self.logger(point=current_point, move_coords=(current_coords,[x_coord, y_coord]))
else:
self.logger(point=current_point, move_coords=None)
self.update_frame()
except TclError:
messagebox.showinfo(title='Input error', message='Input must be a number. Dots used not comma.')
def move_line(self,event = None):
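        '''
        Moves both end points of the selected line by the offsets given in the point input fields.
        :return:
        '''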
if self._line_is_active:
line = self._line_dict[self._active_line]
for pt_num in self._line_dict[self._active_line]:
self._active_point = 'point'+str(pt_num)
self._point_is_active = True
self.move_point()
else:
messagebox.showinfo(title='Input error', message='A line must be selected (left click).')
def move_point(self, event = None, redo = None):
'''
Moving a point.
:return:
'''
if self._point_is_active:
self.new_point(move=True, redo=redo) # doing the actual moving
for line,data in self._line_dict.items():
# updating the span and deleting compartments (if not WT)
if get_num(self._active_point) in data:
coord1 = self._point_dict['point'+str(data[0])]
coord2 = self._point_dict['point'+str(data[1])]
if line in self._line_to_struc.keys():
                        self._line_to_struc[line][0].Plate.set_span(dist(coord1, coord2))
if self._PULS_results is not None:
self._PULS_results.result_changed(line)
if self._line_to_struc[line][0].Plate.get_structure_type() not in ['GENERAL_INTERNAL_NONWT',
'FRAME']:
self._tank_dict = {}
self._main_grid.clear()
self._compartments_listbox.delete(0, 'end')
for line, obj in self._line_to_struc.items():
obj[0].need_recalc = True
self.update_frame()
else:
messagebox.showinfo(title='Input error', message='A point must be selected (right click).')
def copy_point(self, event = None):
'''
Using the same input as new point, but with separate button.
:return:
'''
if self._point_is_active:
self.new_point(copy=True)
else:
messagebox.showinfo(title='Input error', message='A point must be selected (right click).')
def new_line(self, event = None, redo = None):
'''
Adds line to line dictionary. Type is 'line1' = [p1,p2]
'''
try:
# if's ensure that the new line does not exist already and that the point input is not an invalid point.
if redo is None:
first_point, second_point = 'point' + str(self._new_line_p1.get()), \
'point' + str(self._new_line_p2.get())
else:
first_point, second_point = redo
first_point_num, second_point_num = get_num(first_point), get_num(second_point)
if first_point in self._point_dict.keys() and second_point in self._point_dict.keys() \
and first_point != second_point:
line_str, line_str_rev = self.make_point_point_line_string(first_point_num, second_point_num)
                if line_str and line_str not in self._line_point_to_point_string and \
                        line_str_rev not in self._line_point_to_point_string:
name = False
counter = 1
while not name:
current_name = 'line' + str(counter)
if current_name not in self._line_dict.keys():
name = True
counter += 1
self._line_dict[current_name] = [first_point_num, second_point_num]
self.update_frame()
self.logger(line=[current_name, redo])
                    # making strings from the two points defining the line, e.g. for line 1 the strings could be 'p1p2' and 'p2p1'
self._line_point_to_point_string.append(line_str)
self._line_point_to_point_string.append(line_str_rev)
self.add_to_combinations_dict(current_name)
for line, obj in self._line_to_struc.items():
obj[0].need_recalc = True
except TclError:
messagebox.showinfo(title='Input error', message='Input must be a line number.')
def new_structure(self, event = None, pasted_structure = None, multi_return = None, toggle_multi = None,
suspend_recalc = False, cylinder_return = None):
'''
This method maps the structure to the line when clicking "add structure to line" button.
The result is put in a dictionary. Key is line name and value is the structure object.
        self._line_to_struc[line] entries:
[0] AllStructure class
[1] None
[2] calc fatigue class instance
[3] load class instance
[4] None
[5] Cylinder buckling data
:return:
'''
if multi_return is None:
self.save_no_dialogue(backup=True) #keeping a backup
if all([pasted_structure == None, multi_return == None]):
missing_input = False
if self._new_calculation_domain.get() in ['Flat plate, stiffened', 'Flat plate, unstiffened',
'Flat plate, stiffened with girder']:
if any([self._new_stf_spacing.get()==0, self._new_plate_thk.get()==0, self._new_stf_web_h.get()==0,
self._new_stf_web_t.get()==0]): # TODO must account for calculation domain
missing_input = True
if missing_input:
                mess = tk.messagebox.showwarning('No properties defined', 'No properties are defined for the line!\n'
                                                                          'Define spacing, web height, web thickness etc.\n'
                                                                          'Either press a button with a stiffener or input '
                                                                          'manually.', type='ok')
return
if self._line_is_active or multi_return != None:
            # structure dictionary: name of line : [0. AllStructure object, 1. None, 2. calc fatigue class,
            #                                       3. load objects, 4. load combination results, 5. cylinder object]
CylinderObj = None
if multi_return is not None:
prop_dict = multi_return[0].get_main_properties() # From optimizer.
elif toggle_multi is not None:
prop_dict = toggle_multi
elif pasted_structure is None:
calc_dom = self._new_calculation_domain.get()
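                # GUI entry fields use mm and MPa; values are converted to SI units (m and Pa) before being stored.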
obj_dict = {'mat_yield': [self._new_material.get()*1e6, 'Pa'],
'mat_factor': [self._new_material_factor.get(), ''],
'span': [self._new_field_len.get()/1000, 'm'],
'spacing': [self._new_stf_spacing.get()/1000, 'm'],
'plate_thk': [self._new_plate_thk.get()/1000, 'm'],
'stf_web_height': [self._new_stf_web_h.get()/1000, 'm'],
'stf_web_thk': [self._new_stf_web_t.get()/1000, 'm'],
'stf_flange_width': [self._new_stf_fl_w.get()/1000, 'm'],
'stf_flange_thk': [self._new_stf_fl_t.get()/1000, 'm'],
'structure_type': [self._new_stucture_type.get(), ''],
'stf_type': [self._new_stf_type.get(), ''],
'sigma_y1': [self._new_sigma_y1.get(), 'MPa'],
'sigma_y2': [self._new_sigma_y2.get(), 'MPa'],
'sigma_x1': [self._new_sigma_x1.get(), 'MPa'],
'sigma_x2': [self._new_sigma_x2.get(), 'MPa'],
'tau_xy': [self._new_tauxy.get(), 'MPa'],
'plate_kpp': [self._new_plate_kpp.get(), ''],
'stf_kps': [self._new_stf_kps.get(), ''],
'stf_km1': [self._new_stf_km1.get(), ''],
'stf_km2': [self._new_stf_km2.get(), ''],
'stf_km3': [self._new_stf_km3.get(), ''],
'press_side': [self._new_pressure_side.get(), ''],
'structure_types':[self._structure_types, ''],
'zstar_optimization': [self._new_zstar_optimization.get(), ''],
'puls buckling method': [self._new_puls_method.get(), ''],
'puls boundary': [self._new_puls_panel_boundary.get(), ''],
'puls stiffener end': [self._new_buckling_stf_end_support.get(), ''],
'puls sp or up': [self._new_puls_sp_or_up.get(), ''],
'puls up boundary': [self._new_puls_up_boundary.get(), ''],
'panel or shell': [self._new_panel_or_shell.get(), ''],
'girder_lg': [self._new_girder_length_LG.get()/1000, '']}
obj_dict_pl = copy.copy(obj_dict)
obj_dict_stf = copy.copy(obj_dict)
obj_dict_girder = copy.copy(obj_dict)
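                # Plate, stiffener and girder start from the same base property dictionary; girder entries are overridden below.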
obj_dict_girder['stf_web_height'] = [self._new_girder_web_h.get()/1000, 'm']
obj_dict_girder['stf_web_thk'] = [self._new_girder_web_t.get() / 1000, 'm']
obj_dict_girder['stf_flange_width'] = [self._new_girder_fl_w.get() / 1000, 'm']
obj_dict_girder['stf_flange_thk'] = [self._new_girder_fl_t.get() / 1000, 'm']
obj_dict_girder['stf_type'] = [self._new_girder_type.get(), '']
main_dict = dict()
main_dict['minimum pressure in adjacent spans'] = [self._new_buckling_min_press_adj_spans.get(), '']
main_dict['material yield'] = [self._new_material.get()*1e6, 'Pa']
main_dict['load factor on stresses'] = [self._new_buckling_lf_stresses.get(), '']
main_dict['load factor on pressure'] = [1, '']
main_dict['buckling method'] = [self._new_puls_method.get(), '']
main_dict['stiffener end support'] =[self._new_buckling_stf_end_support.get(), ''] # 'Continuous'
main_dict['girder end support'] = [self._new_buckling_girder_end_support.get(), ''] # 'Continuous'
main_dict['tension field'] = [self._new_buckling_tension_field.get(), ''] # 'not allowed'
main_dict['plate effective agains sigy'] = [self._new_buckling_effective_against_sigy.get(), ''] # True
main_dict['buckling length factor stf'] = [self._new_buckling_length_factor_stf.get(), '']
                main_dict['buckling length factor girder'] = [self._new_buckling_length_factor_girder.get(), '']
main_dict['km3'] = [self._new_buckling_km3.get(), ''] # 12
main_dict['km2'] = [self._new_buckling_km2.get(), ''] # 24
main_dict['girder distance between lateral support'] = [self._new_buckling_girder_dist_bet_lat_supp.get(), '']
main_dict['stiffener distance between lateral support'] = [self._new_buckling_stf_dist_bet_lat_supp.get(), '']
main_dict['panel length, Lp'] = [self._new_panel_length_Lp.get(), '']
main_dict['pressure side'] = [self._new_pressure_side.get(), ''] # either 'stiffener', 'plate', 'both'
main_dict['fabrication method stiffener'] = [self._new_buckling_fab_method_stf.get(), '']
main_dict['fabrication method girder'] = [self._new_buckling_fab_method_girder.get(), '']
main_dict['calculation domain'] = [self._new_calculation_domain.get(), '']
prop_dict = {'main dict': main_dict,
'Plate': obj_dict_pl,
'Stiffener': None if calc_dom == 'Flat plate, unstiffened' else obj_dict_stf,
'Girder': None if calc_dom in ['Flat plate, unstiffened', 'Flat plate, stiffened']
else obj_dict_girder}
if self._new_calculation_domain.get() not in ['Flat plate, stiffened','Flat plate, unstiffened',
'Flat plate, stiffened with girder'] and cylinder_return is None:
'''
Shell structure.
0:'Stiffened panel, flat', 1:'Unstiffened shell (Force input)', 2:'Unstiffened panel (Stress input)',
3:'Longitudinal Stiffened shell (Force input)', 4:'Longitudinal Stiffened panel (Stress input)',
5:'Ring Stiffened shell (Force input)', 6:'Ring Stiffened panel (Stress input)',
7:'Orthogonally Stiffened shell (Force input)', 8:'Orthogonally Stiffened panel (Stress input)'
'''
domain_string = self._new_calculation_domain.get()
domain_int = self._shell_geometries_map[domain_string]
dummy_data = {'span': [self._new_field_len.get()/1000, 'm'],
'plate_thk': [self._new_plate_thk.get()/1000, 'm'],
'structure_type': [self._new_stucture_type.get(), ''],
'sigma_y1': [self._new_sigma_y1.get(), 'MPa'],
'sigma_y2': [self._new_sigma_y2.get(), 'MPa'],
'sigma_x1': [self._new_sigma_x1.get(), 'MPa'],
'sigma_x2': [self._new_sigma_x2.get(), 'MPa'],
'tau_xy': [self._new_tauxy.get(), 'MPa'],
'plate_kpp': [self._new_plate_kpp.get(), ''],
'stf_kps': [self._new_stf_kps.get(), ''],
'stf_km1': [self._new_stf_km1.get(), ''],
'stf_km2': [self._new_stf_km2.get(), ''],
'stf_km3': [self._new_stf_km3.get(), ''],
'press_side': [self._new_pressure_side.get(), ''],
'structure_types':[self._structure_types, ''],
'zstar_optimization': [self._new_zstar_optimization.get(), ''],
'puls buckling method': [self._new_puls_method.get(), ''],
'puls boundary': [self._new_puls_panel_boundary.get(), ''],
'puls stiffener end': [self._new_buckling_stf_end_support.get(), ''],
'puls sp or up': [self._new_puls_sp_or_up.get(), ''],
'puls up boundary': [self._new_puls_up_boundary.get(), ''],
'panel or shell': [self._new_panel_or_shell.get(), ''],
'mat_factor': [self._new_material_factor.get(), '',],
'spacing': [self._new_stf_spacing.get()/1000, 'm'],}
# Main class input
# Shell data input
shell_dict = {'plate_thk': [self._new_shell_thk.get() / 1000, 'm'],
'radius': [self._new_shell_radius.get() / 1000, 'm'],
'distance between rings, l': [self._new_shell_dist_rings.get() / 1000, 'm'],
'length of shell, L': [self._new_shell_length.get() / 1000, 'm'],
'tot cyl length, Lc': [self._new_shell_tot_length.get() / 1000, 'm'],
'eff. buckling lenght factor': [self._new_shell_k_factor.get(), ''],
'mat_yield': [self._new_shell_yield.get() * 1e6, 'Pa'],
}
# Longitudinal stiffener input
long_dict = {'spacing': [self._new_stf_spacing.get() / 1000, 'm'],
'stf_web_height': [self._new_stf_web_h.get() / 1000, 'm'],
'stf_web_thk': [self._new_stf_web_t.get() / 1000, 'm'],
'stf_flange_width': [self._new_stf_fl_w.get() / 1000, 'm'],
'stf_flange_thk': [self._new_stf_fl_t.get() / 1000, 'm'],
'stf_type': [self._new_stf_type.get(), ''],
'span': [self._new_field_len.get()/1000, 'm'],
'mat_yield': [self._new_shell_yield.get() * 1e6, 'Pa'],
'panel or shell': ['shell', '']}
ring_stf_dict = {'stf_web_height': [self._new_shell_ring_stf_hw.get() / 1000, 'm'],
'stf_web_thk': [self._new_shell_ring_stf_tw.get() / 1000, 'm'],
'stf_flange_width': [self._new_shell_ring_stf_b.get() / 1000, 'm'],
'stf_flange_thk': [self._new_shell_ring_stf_tf.get() / 1000, 'm'],
'stf_type': [self._new_shell_ring_stf_type.get(), ''],
'mat_yield': [self._new_shell_yield.get() * 1e6, 'Pa'],
'panel or shell': ['shell', '']}
ring_frame_dict = {'stf_web_height': [self._new_shell_ring_frame_hw.get() / 1000, 'm'],
'stf_web_thk': [self._new_shell_ring_frame_tw.get() / 1000, 'm'],
'stf_flange_width': [self._new_shell_ring_frame_b.get() / 1000, 'm'],
'stf_flange_thk': [self._new_shell_ring_frame_tf.get() / 1000, 'm'],
'stf_type': [self._new_shell_ring_frame_type.get(), ''],
'span': [self._new_field_len.get()/1000, 'm'],
'mat_yield': [self._new_shell_yield.get() * 1e6, 'Pa'],
'panel or shell': ['shell', '']}
geometry = self._shell_geometries_map[self._new_calculation_domain.get()]
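                # Geometries 1, 2, 5 and 6 have no longitudinal stiffener; geometries 1-4 have no ring stiffener or ring frame.
                # If forces are given (radio button == 1) they are converted to stresses, otherwise stresses are converted to forces.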
if self._new_shell_stress_or_force.get() == 1:
forces = [self._new_shell_Nsd.get(), self._new_shell_Msd.get(), \
self._new_shell_Tsd.get(), self._new_shell_Qsd.get()]
sasd, smsd, tTsd, tQsd, shsd = hlp.helper_cylinder_stress_to_force_to_stress(
stresses=None, forces=forces, geometry=geometry, shell_t=self._new_shell_thk.get(),
shell_radius=self._new_shell_radius.get(), shell_spacing=self._new_stf_spacing.get(),
hw=self._new_stf_web_h.get(), tw=self._new_stf_web_t.get(), b=self._new_stf_fl_w.get(),
tf=self._new_stf_fl_t.get(), CylinderAndCurvedPlate=CylinderAndCurvedPlate)
self._new_shell_sasd.set(sasd)
self._new_shell_smsd.set(smsd)
self._new_shell_tTsd.set(tTsd)
self._new_shell_tQsd.set(tQsd)
#self._new_shell_shsd.set(0)
else:
stresses = [self._new_shell_sasd.get(), self._new_shell_smsd.get(),
abs(self._new_shell_tTsd.get()),
self._new_shell_tQsd.get(), self._new_shell_shsd.get()]
sasd, smsd, tTsd, tQsd, shsd = stresses
Nsd, Msd, Tsd, Qsd, shsd = hlp.helper_cylinder_stress_to_force_to_stress(
stresses=stresses, geometry=geometry, shell_t=self._new_shell_thk.get(),
shell_radius=self._new_shell_radius.get(), shell_spacing=self._new_stf_spacing.get(),
hw=self._new_stf_web_h.get(), tw=self._new_stf_web_t.get(), b=self._new_stf_fl_w.get(),
tf=self._new_stf_fl_t.get(), CylinderAndCurvedPlate=CylinderAndCurvedPlate)
self._new_shell_Nsd.set(Nsd)
self._new_shell_Msd.set(Msd)
self._new_shell_Tsd.set(Tsd)
self._new_shell_Qsd.set(Qsd)
main_dict_cyl = {'sasd': [sasd*1e6, 'Pa'],
'smsd': [smsd*1e6, 'Pa'],
'tTsd': [tTsd*1e6, 'Pa'],
'tQsd': [tQsd*1e6, 'Pa'],
'psd': [self._new_shell_psd.get() *1e6, 'Pa'],
'shsd': [shsd *1e6, 'Pa'],
'geometry': [self._shell_geometries_map[self._new_calculation_domain.get()], ''],
'material factor': [self._new_shell_mat_factor.get(), ''],
'delta0': [0.005, ''],
'fab method ring stf': [self._new_shell_ring_stf_fab_method.get(), ''],
'fab method ring girder': [self._new_shell_ring_frame_fab_method.get(), ''],
'E-module': [self._new_shell_e_module.get(), 'Pa'],
'poisson': [self._new_shell_poisson.get(), ''],
'mat_yield': [self._new_shell_yield.get() *1e6, 'Pa'],
'length between girders': [self._new_shell_ring_frame_length_between_girders.get()/1000, 'm'],
'panel spacing, s': [self._new_shell_panel_spacing.get()/1000, 'm'],
'ring stf excluded': [self._new_shell_exclude_ring_stf.get(), ''],
'ring frame excluded': [self._new_shell_exclude_ring_frame.get(), '',],
'ULS or ALS': [self._new_shell_uls_or_als.get(), '',],
'end cap pressure': [self._new_shell_end_cap_pressure_included.get(), '']
}
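                # Copy shared panel properties from dummy_data into the stiffener dictionaries where not already defined.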
for key, value in dummy_data.items():
if key not in long_dict.keys():
long_dict[key] = value
if key not in ring_stf_dict.keys():
ring_stf_dict[key] = value
if key not in ring_frame_dict.keys():
ring_frame_dict[key] = value
CylinderObj = CylinderAndCurvedPlate(main_dict_cyl, Shell(shell_dict),
long_stf=None if geometry in [1,2,5,6]
else Structure(long_dict),
ring_stf=None if any([geometry in [1,2,3,4],
self._new_shell_exclude_ring_stf.get()])
else Structure(ring_stf_dict),
ring_frame=None if any([geometry in [1,2,3,4],
self._new_shell_exclude_ring_frame.get()])
else Structure(ring_frame_dict))
elif cylinder_return is not None:
main_dict_cyl, shell_dict, long_dict, ring_stf_dict, ring_frame_dict = \
cylinder_return.get_all_properties()
else:
prop_dict = pasted_structure.get_main_properties()
if self._active_line not in self._line_to_struc.keys() :
self._line_to_struc[self._active_line] = [None, None, None, [None], {}, None]
# First entry
                # Flat plate domains: 'Flat plate, stiffened with girder', 'Flat plate, stiffened', 'Flat plate, unstiffened'
cdom = self._new_calculation_domain.get()
All = AllStructure(Plate=CalcScantlings(prop_dict['Plate']),
Stiffener=None if cdom == 'Flat plate, unstiffened'
else CalcScantlings(prop_dict['Stiffener']),
Girder=None if cdom in ['Flat plate, unstiffened', 'Flat plate, stiffened']
else CalcScantlings(prop_dict['Girder']),
main_dict=prop_dict['main dict'])
self._sections = add_new_section(self._sections, struc.Section(obj_dict_stf)) # TODO error when pasting
self._line_to_struc[self._active_line][0] = All
self._line_to_struc[self._active_line][5] = CylinderObj
if self._line_to_struc[self._active_line][0].Plate.get_structure_type() not in \
self._structure_types['non-wt']:
self._tank_dict = {}
self._main_grid.clear()
self._compartments_listbox.delete(0, 'end')
if self._new_calculation_domain.get() not in ['Flat plate, stiffened','Flat plate, unstiffened',
'Flat plate, stiffened with girder']:
CylinderObj = CylinderAndCurvedPlate(main_dict_cyl, Shell(shell_dict),
long_stf=None if geometry in [1,2,5,6]
else Structure(long_dict),
ring_stf=None if any([geometry in [1,2,3,4],
self._new_shell_exclude_ring_stf.get()])
else Structure(ring_stf_dict),
ring_frame=None if any([geometry in [1,2,3,4],
self._new_shell_exclude_ring_frame.get()])
else Structure(ring_frame_dict))
self._line_to_struc[self._active_line][5] = CylinderObj
else:
# if self._new_calculation_domain.get() in ['Flat plate, stiffened','Flat plate, unstiffened',
# 'Flat plate, stiffened with girder'] and \
# self._line_to_struc[self._active_line][5] is not None:
# self._line_to_struc[self._active_line][5] = None
prev_type = self._line_to_struc[self._active_line][0].Plate.get_structure_type()
prev_all_obj = copy.deepcopy(self._line_to_struc[self._active_line][0])
self._line_to_struc[self._active_line][0].set_main_properties(prop_dict)
if self._new_scale_stresses.get() and prev_all_obj.get_main_properties() != \
self._line_to_struc[self._active_line][0].get_main_properties():
if prev_all_obj.Stiffener is not None:
plate = self._line_to_struc[self._active_line][0].Plate
stiffener = self._line_to_struc[self._active_line][0].Stiffener
girder = self._line_to_struc[self._active_line][0].Girder
calc_tup = (plate.get_s(), plate.get_pl_thk(), stiffener.get_web_h(), stiffener.get_web_thk(),
stiffener.get_fl_w(),
stiffener.get_fl_thk(), plate.get_span(), stiffener.get_lg() if girder is None else
girder.get_lg(), stiffener.stiffener_type)
else:
calc_tup = self._line_to_struc[self._active_line][0].Plate.get_tuple()
self._line_to_struc[self._active_line][0] = op.create_new_calc_obj(prev_all_obj, calc_tup,
fup=self._new_fup.get(),
fdwn=self._new_fdwn.get())[0]
self._line_to_struc[self._active_line][0].need_recalc = True
if self._line_to_struc[self._active_line][2] is not None:
calc_dom = self._line_to_struc[self._active_line][0].calculation_domain
if calc_dom == 'Flat plate, unstiffened':
self._line_to_struc[self._active_line][2] = None
else:
self._line_to_struc[self._active_line][2].set_main_properties(prop_dict['Stiffener'])
if prev_type in self._structure_types['non-wt'] and prop_dict['Plate']['structure_type'][0] in \
self._structure_types['internals'] + self._structure_types['horizontal'] + \
self._structure_types['vertical']:
self._tank_dict = {}
self._main_grid.clear()
self._compartments_listbox.delete(0, 'end')
if all([CylinderObj is None, cylinder_return is None,
self._line_to_struc[self._active_line][5] is not None]):
self._line_to_struc[self._active_line][5] = None
elif CylinderObj is not None:
if self._line_to_struc[self._active_line][5] is not None and self._new_scale_stresses.get():
NewCylinderObj = op.create_new_cylinder_obj(self._line_to_struc[self._active_line][5],
CylinderObj.get_x_opt())
NewCylinderObj.LongStfObj = None if CylinderObj.LongStfObj is None \
else NewCylinderObj.LongStfObj
NewCylinderObj.RingStfObj = None if CylinderObj.RingStfObj is None \
else NewCylinderObj.RingStfObj
NewCylinderObj.RingFrameObj = None if CylinderObj.RingFrameObj is None \
else NewCylinderObj.RingFrameObj
self._line_to_struc[self._active_line][5] = CylinderObj
elif cylinder_return is not None:
self._line_to_struc[self._active_line][5] = cylinder_return
try:
self.calculate_all_load_combinations_for_line_all_lines()
except (KeyError, AttributeError):
pass
else:
pass
if self._PULS_results != None:
self._PULS_results.result_changed(self._active_line)
if not suspend_recalc:
# when changing multiple parameters, recalculations are suspended.
for line, obj in self._line_to_struc.items():
obj[0].need_recalc = True
state = self.update_frame()
if state != None and self._line_is_active:
self._weight_logger['new structure']['COG'].append(self.get_color_and_calc_state()['COG'])
self._weight_logger['new structure']['weight'].append(self.get_color_and_calc_state()['Total weight'])
self._weight_logger['new structure']['time'].append(time.time())
self.cylinder_gui_mods()
self.get_unique_plates_and_beams()
def option_meny_structure_type_trace(self, event):
''' Updating of the values in the structure type option menu. '''
self._new_sigma_y1.set(self._default_stresses[self._new_stucture_type.get()][0])
self._new_sigma_y2.set(self._default_stresses[self._new_stucture_type.get()][1])
self._new_sigma_x1.set(self._default_stresses[self._new_stucture_type.get()][2])
self._new_sigma_x2.set(self._default_stresses[self._new_stucture_type.get()][3])
self._new_tauxy.set(self._default_stresses[self._new_stucture_type.get()][4])
if self._new_stucture_type.get() in self._structure_types['vertical']:
text = '(Vertical pressure calc.)'
elif self._new_stucture_type.get() in self._structure_types['horizontal']:
text = '(Horizontal pressure calc.)'
elif self._new_stucture_type.get() in self._structure_types['non-wt']:
text = '(Non-WT (pressure = 0))'
elif self._new_stucture_type.get() in self._structure_types['internals']:
text = '(Internal, pressure from comp.)'
else:
text = ''
self._new_stucture_type_label.set(text)
def tank_density_trace(self, event):
''' Setting tank densities '''
self._new_density.set(self._tank_options[self._new_content_type.get()])
def new_tank(self,comp_no,cells, min_el, max_el):
'''
Creating the tanks.
:return:
'''
# points, self._point_dict, content), point
self.save_no_dialogue(backup=True) # keeping a backup
temp_tank_dict = { 'comp_no' : comp_no,
'cells' : cells,
'min_el' : min_el[1],
'max_el' : max_el[1],
'content' : self._new_content_type.get(),
'added_press' : self._new_overpresure.get(),
'acc' : self._accelerations_dict,
'density' : self._new_density.get(),
'all_types' : self._options_type}
self._tank_dict['comp' + str(comp_no)] = Tanks(temp_tank_dict)
if self.__returned_load_data is not None:
            for returned_load in self.__returned_load_data:
                self.on_close_load_window(returned_load)
self.get_cob() # Recalculating COB
def get_cob(self):
'''
Calculation of center of buoyancy.
'''
self._center_of_buoyancy = dict()
self._center_of_buoyancy['all'] = self._grid_calc.grid.get_center_of_matrix(scale=self._base_scale_factor)
for load, data in self._load_dict.items():
if data[0].is_static():
draft = data[0].get_static_draft()
cob = self._grid_calc.grid.get_center_of_matrix(height_limit=draft, scale=self._base_scale_factor)
self._center_of_buoyancy[draft] = cob
def calculate_all_load_combinations_for_line_all_lines(self):
'''
Calculating all results.
:return:
'''
line_results = {}
for line, data in self._line_to_struc.items():
line_results[line] = data[1].is_acceptable_sec_mod(
data[1].get_section_modulus(), self.get_highest_pressure(line)['normal'])
return line_results
def calculate_all_load_combinations_for_line(self, line, limit_state = 'ULS', get_load_info = False):
'''
Calculating pressure for line.
self._load_factors_dict = {'dnva':[1.3,1.2,0.7], 'dnvb':[1,1,1.3], 'tanktest':[1,1,1]} # DNV loads factors
self._load_conditions = ['loaded', 'ballast','tanktest']
:return:
'''
if limit_state == 'FLS':
return
results = {} #dict - dnva/dnvb/tanktest/manual
load_info = []
# calculating for DNV a and DNV b
for dnv_ab in ['dnva', 'dnvb']: #, load_factors in self._load_factors_dict.items():
results[dnv_ab] = []
for load_condition in self._load_conditions[0:2]:
returned = self.calculate_one_load_combination(line, dnv_ab, load_condition)
                if returned is not None:
                    results[dnv_ab].append(returned[0])
                    load_info.extend(returned[1])
# calculating for tank test condition
results['tanktest'] = []
res_val = self.calculate_one_load_combination(line, "tanktest", 'tanktest')
results['tanktest'].append(res_val[0])
        load_info.extend(res_val[1])
# calculating for manual condition
results['manual'] = []
res_val = self.calculate_one_load_combination(line, 'manual', 'manual')
results['manual'].append(res_val[0])
        load_info.extend(res_val[1])
results['slamming'] = []
res_val = self.calculate_one_load_combination(line, 'slamming', 'slamming')
results['slamming'].append(res_val[0])
        load_info.extend(res_val[1])
if get_load_info:
return load_info
return results
def calculate_one_load_combination(self, current_line, comb_name, load_condition):
'''
Creating load combination for ULS.
Inserted into self._line_to_struc index = 4
"dnva", "line12", "static_ballast_10m"
#load combination dictionary (comb,line,load) : [stat - DoubleVar(), dyn - DoubleVar], on/off - IntVar()]
:return:
'''
defined_loads = []
for load_obj in self._line_to_struc[current_line][3]:
if load_obj is not None:
if load_obj.get_limit_state() != 'FLS':
defined_loads.append(load_obj)
if self._tank_dict == {}:
defined_tanks = []
else:
defined_tanks = [['comp'+str(int(tank_num)), self._tank_dict['comp'+str(int(tank_num))]]
for tank_num in self.get_compartments_for_line_duplicates(current_line)]
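        # The load calculation coordinate is the radial midpoint and the lowest elevation of the line.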
coord = (self.get_line_radial_mid(current_line), self.get_line_low_elevation(current_line))
if load_condition not in ['tanktest','manual','slamming']:
acc = (self._accelerations_dict['static'], self._accelerations_dict['dyn_'+str(load_condition)])
else:
acc = (self._accelerations_dict['static'], 0)
load_factors_all = self._new_load_comb_dict
current_line_obj = [current_line, self._line_to_struc[current_line][0].Plate]
if self._line_to_struc[current_line][0].Plate.get_structure_type() in ['', 'FRAME','GENERAL_INTERNAL_NONWT']:
return [0, '']
else:
return_value = one_load_combination(current_line_obj, coord, defined_loads, load_condition,
defined_tanks, comb_name, acc, load_factors_all)
return return_value
def run_optimizer_for_line(self,line,goal,constrains):
'''
        Returning the result of an optimization process
:param line:
:param goal:
:param constrains:
:return:
'''
pass
def update_tank(self):
'''
Updating properties of the tank object that was created during BFS search.
:return:
'''
if len(list(self._tank_dict.keys())) == 0:
return
current_tank = self._tank_dict['comp' + str(self._compartments_listbox.get('active'))]
current_tank.set_overpressure(self._new_overpresure.get())
current_tank.set_content(self._new_content_type.get())
current_tank.set_acceleration(self._accelerations_dict)
current_tank.set_density(self._new_density.get())
for line, obj in self._line_to_struc.items():
obj[0].need_recalc = True
            if self._PULS_results is not None and \
                    self._compartments_listbox.get('active') in self.get_compartments_for_line(line):
                self._PULS_results.result_changed(line)
def delete_line(self, event = None, undo = None, line = None):
'''
Deleting line and line properties.
:return:
'''
try:
if line is not None:
line = line
else:
line = 'line' + str(self._ent_delete_line.get())
if line in self._line_dict.keys() or undo is not None:
line = line if undo is None else undo
point_str = 'p' + str(self._line_dict[line][0]) + 'p' + str(self._line_dict[line][1])
point_str_rev = 'p' + str(self._line_dict[line][1]) + 'p' + str(self._line_dict[line][0])
if line in self._line_dict.keys():
if line in self._line_to_struc.keys():
if self._line_to_struc[line][0].Plate.get_structure_type() not in self._structure_types['non-wt']:
self.delete_properties_pressed()
self.delete_all_tanks()
self._line_dict.pop(line)
if line in self._line_to_struc.keys():
self._line_to_struc.pop(line)
self._line_point_to_point_string.pop(self._line_point_to_point_string.index(point_str))
self._line_point_to_point_string.pop(self._line_point_to_point_string.index(point_str_rev))
self._active_line = ''
# Removing from load dict
if self._load_dict != {}:
loads = list(self._load_dict.keys())
for load in loads:
if line in self._load_dict[load][1]:
self._load_dict[load][1].pop(self._load_dict[load][1].index(line))
# Removing from puls results
if self._PULS_results is not None:
self._PULS_results.result_changed(line)
self.update_frame()
else:
                messagebox.showinfo(title='No line.', message='Input line does not exist.')
except TclError:
            messagebox.showinfo(title='Input error', message='Input must be a number. Use a dot as the decimal separator, not a comma.')
def delete_point(self, event = None, undo = None, point = None):
'''
Deleting point and connected lines.
'''
try:
            if point is None:
point = 'point' + str(self._ent_delete_point.get()) if undo is None else undo
if point in self._point_dict.keys():
line_to_delete = []
                # finding the lines that need to be deleted
for line, points in self._line_dict.items():
                    if get_num(point) in points:
line_to_delete.append(line)
# deleting the lines and the connected properties. also deleting point to point string list items.
for line in list(line_to_delete):
self.delete_line(line = line)
# point_str = 'p' + str(self._line_dict[line][0]) + 'p' + str(self._line_dict[line][1])
# point_str_rev = 'p' + str(self._line_dict[line][1]) + 'p' + str(self._line_dict[line][0])
# self._line_point_to_point_string.pop(self._line_point_to_point_string.index(point_str))
# self._line_point_to_point_string.pop(self._line_point_to_point_string.index(point_str_rev))
# self._line_dict.pop(line)
# # properties are deleted here
# if line in self._line_to_struc.keys():
# self._line_to_struc.pop(line)
                # at the end, the point is deleted from the point dict.
self._point_dict.pop(point)
self._active_point = ''
else:
messagebox.showinfo(title='No point.', message='Input point does not exist.')
self.update_frame()
except TclError:
            messagebox.showinfo(title='Input error', message='Input must be a number. Use a dot as the decimal separator, not a comma.')
def delete_key_pressed(self, event = None):
if self._active_line != '':
self.delete_line(line = self._active_line)
if self._active_point != '':
self.delete_point()
def copy_property(self, event = None):
''' Copy a property of a line'''
if self._active_line not in self._line_to_struc.keys():
tk.messagebox.showinfo('No properties', 'This line does not have properties.')
return
else:
self.__copied_line_prop = self._active_line
def paste_property(self, event = None):
''' Paste property to line '''
if self._active_line not in self._line_to_struc.keys():
if self._line_to_struc[self.__copied_line_prop][5] is not None:
self.new_structure(cylinder_return=self._line_to_struc[self.__copied_line_prop][5])
else:
self.new_structure(pasted_structure=self._line_to_struc[self.__copied_line_prop][0])
elif self._line_to_struc[self.__copied_line_prop][5] is not None:
self.new_structure(cylinder_return=self._line_to_struc[self.__copied_line_prop][5])
elif self._line_to_struc[self._active_line][0].Plate.get_structure_type() !=\
self._line_to_struc[self.__copied_line_prop][0].Plate.get_structure_type():
tk.messagebox.showerror('Paste error', 'Can only paste to same structure type. This is to avoid problems '
'with compartments not detecting changes to watertightness.')
return
else:
self.new_structure(pasted_structure = self._line_to_struc[self.__copied_line_prop][0])
self.update_frame()
def delete_properties_pressed(self, event = None, line = None):
action_taken = False
if line != None:
self._line_to_struc.pop(line)
self._state_logger.pop(line)
action_taken = True
elif self._active_line != '' and self._active_line in self._line_to_struc.keys():
self._line_to_struc.pop(self._active_line)
self._state_logger.pop(self._active_line)
action_taken = True
if action_taken:
for line, obj in self._line_to_struc.items():
obj[0].need_recalc = True
self.update_frame()
def delete_all_tanks(self):
'''
Delete the tank that has been selected in the Listbox
:return:
'''
#if self._grid_calc != None:
self._tank_dict = {}
self._compartments_listbox.delete(0,'end')
self._main_grid.clear()
self._grid_calc = None
if self.__returned_load_data is not None:
            for returned_load in self.__returned_load_data:
                self.on_close_load_window(returned_load)
# else:
# pass
self._center_of_buoyancy = dict() # Resetting dict
self.update_frame()
def set_selected_variables(self, line):
'''
Setting the properties in the entry fields to the specified values.
'''
if line in self._line_to_struc:
all_dict = self._line_to_struc[line][0].get_main_properties()
main_dict = {}
for key, val in all_dict['main dict'].items():
main_dict[key] = [0, val[1]] if val[0] is None else val
self._new_buckling_min_press_adj_spans.set(main_dict['minimum pressure in adjacent spans'][0])
self._new_buckling_lf_stresses.set(main_dict['load factor on stresses'][0])
self._new_buckling_stf_end_support.set(main_dict['stiffener end support'][0])
self._new_buckling_girder_end_support.set(main_dict['girder end support'][0])
self._new_buckling_tension_field.set(main_dict['tension field'][0])
self._new_buckling_effective_against_sigy.set(main_dict['plate effective agains sigy'][0])
self._new_buckling_length_factor_stf.set(main_dict['buckling length factor stf'][0])
self._new_buckling_length_factor_girder.set(main_dict['buckling length factor girder'][0])
self._new_buckling_km3.set(main_dict['km3'][0])
self._new_buckling_km2.set(main_dict['km2'][0])
self._new_buckling_stf_dist_bet_lat_supp.set(main_dict['stiffener distance between lateral support'][0])
self._new_buckling_girder_dist_bet_lat_supp.set(main_dict['girder distance between lateral support'][0])
self._new_buckling_fab_method_stf.set(main_dict['fabrication method stiffener'][0])
self._new_buckling_fab_method_girder.set(main_dict['fabrication method girder'][0])
self._new_pressure_side.set(main_dict['pressure side'][0])
self._new_panel_length_Lp.set(main_dict['panel length, Lp'][0])
self._new_calculation_domain.set(main_dict['calculation domain'][0])
for idx, properties in enumerate([all_dict['Plate'], all_dict['Stiffener'], all_dict['Girder']]):
if properties is None:
continue
if idx == 0:
self._new_material.set(round(properties['mat_yield'][0]/1e6,5))
self._new_material_factor.set(properties['mat_factor'][0])
self._new_field_len.set(round(properties['span'][0]*1000,5))
self._new_plate_thk.set(round(properties['plate_thk'][0]*1000,5))
self._new_plate_kpp.set(properties['plate_kpp'][0])
self._new_sigma_y1.set(round(properties['sigma_y1'][0],1))
self._new_sigma_y2.set(round(properties['sigma_y2'][0],1))
self._new_sigma_x1.set(round(properties['sigma_x1'][0],1))
self._new_sigma_x2.set(round(properties['sigma_x2'][0], 1))
self._new_tauxy.set(round(properties['tau_xy'][0],1))
self._new_stucture_type.set(properties['structure_type'][0])
# try:
# self._new_pressure_side.set(properties['press_side'][0])
# except KeyError:
# self._new_pressure_side.set('both sides')
self._new_zstar_optimization.set(properties['zstar_optimization'][0])
self._new_puls_method.set(properties['puls buckling method'][0])
self._new_puls_panel_boundary.set(properties['puls boundary'][0])
self._new_buckling_stf_end_support.set(properties['puls stiffener end'][0])
self._new_puls_sp_or_up.set(properties['puls sp or up'][0])
self._new_puls_up_boundary.set(properties['puls up boundary'][0])
if idx == 1:
self._new_stf_spacing.set(round(properties['spacing'][0] * 1000, 5))
self._new_stf_kps.set(properties['stf_kps'][0])
self._new_stf_km1.set(properties['stf_km1'][0])
self._new_stf_km2.set(properties['stf_km2'][0])
self._new_stf_km3.set(properties['stf_km3'][0])
self._new_stf_web_h.set(round(properties['stf_web_height'][0]*1000,5))
self._new_stf_web_t.set(round(properties['stf_web_thk'][0]*1000,5))
self._new_stf_fl_w.set(round(properties['stf_flange_width'][0]*1000,5))
self._new_stf_fl_t.set(round(properties['stf_flange_thk'][0]*1000,5))
self._new_stf_type.set(properties['stf_type'][0])
if idx == 2:
self._new_girder_web_h.set(round(properties['stf_web_height'][0]*1000,5))
self._new_girder_web_t.set(round(properties['stf_web_thk'][0]*1000,5))
self._new_girder_fl_w.set(round(properties['stf_flange_width'][0]*1000,5))
self._new_girder_fl_t.set(round(properties['stf_flange_thk'][0]*1000,5))
self._new_girder_type.set(properties['stf_type'][0])
if self._line_to_struc[self._active_line][5] is not None:
cylobj = self._line_to_struc[self._active_line][5]
all_dicts = cylobj.get_all_properties()
# Shell data input
shell_dict = all_dicts['Shell']
self._new_shell_thk.set(shell_dict['plate_thk'][0]*1000)
self._new_shell_radius.set(shell_dict['radius'][0]*1000)
self._new_shell_dist_rings.set(shell_dict['distance between rings, l'][0]*1000)
self._new_shell_length.set(shell_dict['length of shell, L'][0]*1000)
self._new_shell_tot_length.set(shell_dict['tot cyl length, Lc'][0]*1000)
self._new_shell_k_factor.set(shell_dict['eff. buckling lenght factor'][0])
self._new_shell_yield.set(shell_dict['mat_yield'][0]/1e6)
main_dict_cyl = all_dicts['Main class']
self._new_shell_sasd.set(main_dict_cyl['sasd'][0]/1e6)
self._new_shell_smsd .set(main_dict_cyl['smsd'][0]/1e6)
self._new_shell_tTsd.set(main_dict_cyl['tTsd'][0]/1e6)
self._new_shell_tQsd.set(main_dict_cyl['tQsd'][0]/1e6)
self._new_shell_psd.set(main_dict_cyl['psd'][0]/1e6)
self._new_shell_shsd.set(main_dict_cyl['shsd'][0]/1e6)
self._new_calculation_domain.set(CylinderAndCurvedPlate.geomeries[main_dict_cyl['geometry'][0]])
self._new_shell_mat_factor.set(main_dict_cyl['material factor'][0])
self._new_shell_ring_stf_fab_method.set(main_dict_cyl['fab method ring stf'][0])
self._new_shell_ring_frame_fab_method.set(main_dict_cyl['fab method ring girder'][0])
self._new_shell_e_module.set(main_dict_cyl['E-module'][0])
self._new_shell_poisson.set(main_dict_cyl['poisson'][0])
self._new_shell_yield.set(main_dict_cyl['mat_yield'][0]/1e6)
self._new_shell_ring_frame_length_between_girders.set(main_dict_cyl['length between girders'][0]*1000)
self._new_shell_panel_spacing.set(main_dict_cyl['panel spacing, s'][0]*1000)
self._new_shell_exclude_ring_stf.set(main_dict_cyl['ring stf excluded'][0])
self._new_shell_exclude_ring_frame.set(main_dict_cyl['ring frame excluded'][0])
self._new_shell_uls_or_als.set(main_dict_cyl['ULS or ALS'][0])
self._new_shell_end_cap_pressure_included.set(main_dict_cyl['end cap pressure'][0])
if cylobj.LongStfObj is not None:
# Longitudinal stiffener input
long_dict = all_dicts['Long. stf.']
self._new_stf_spacing.set(long_dict['spacing'][0]*1000)
self._new_stf_web_h.set(long_dict['stf_web_height'][0]*1000)
self._new_stf_web_t.set(long_dict['stf_web_thk'][0]*1000)
self._new_stf_fl_w.set(long_dict['stf_flange_width'][0]*1000)
self._new_stf_fl_t.set(long_dict['stf_flange_thk'][0]*1000)
self._new_stf_type.set(long_dict['stf_type'][0])
self._new_field_len.set(long_dict['span'][0]*1000)
self._new_shell_yield.set(long_dict['mat_yield'][0]/1e6)
self._new_panel_or_shell.set('shell')
if cylobj.RingStfObj is not None:
ring_stf_dict = all_dicts['Ring stf.']
self._new_shell_ring_stf_hw.set(ring_stf_dict['stf_web_height'][0]*1000)
self._new_shell_ring_stf_tw.set(ring_stf_dict['stf_web_thk'][0]*1000)
self._new_shell_ring_stf_b.set(ring_stf_dict['stf_flange_width'][0]*1000)
self._new_shell_ring_stf_tf.set(ring_stf_dict['stf_flange_thk'][0]*1000)
self._new_shell_ring_stf_type.set(ring_stf_dict['stf_type'][0])
self._new_shell_yield.set(ring_stf_dict['mat_yield'][0]/1e6)
self._new_panel_or_shell.set('shell')
if cylobj.RingFrameObj is not None:
ring_frame_dict = all_dicts['Ring frame']
self._new_shell_ring_frame_hw.set(ring_frame_dict ['stf_web_height'][0]*1000)
self._new_shell_ring_frame_tw.set(ring_frame_dict ['stf_web_thk'][0]*1000)
self._new_shell_ring_frame_b.set(ring_frame_dict ['stf_flange_width'][0]*1000)
self._new_shell_ring_frame_tf.set(ring_frame_dict ['stf_flange_thk'][0]*1000)
self._new_shell_ring_frame_type.set(ring_frame_dict ['stf_type'][0])
self._new_shell_yield.set(ring_frame_dict ['mat_yield'][0]/1e6)
self._new_panel_or_shell.set('shell')
def get_highest_pressure(self, line, limit_state = 'ULS'):
'''
Returning the highest pressure of a line.
:return:
'''
all_press = list()
if limit_state == 'ULS':
pressures = self.calculate_all_load_combinations_for_line(line)
slm_red, psl, slm_red_pl, slm_red_stf = 1, 0, 1, 1
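            # Defaults: no slamming pressure and no slamming reduction factors on plate or stiffener.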
for key, value in pressures.items():
if key != 'slamming':
all_press.append(max(value))
else:
if value is not None:
for load in self._line_to_struc[line][3]:
if load is not None:
if load.get_load_condition() == 'slamming':
slm_red_pl = load.get_slamming_reduction_plate()
slm_red_stf = load.get_slamming_reduction_stf()
psl = max(value)
return {'normal':max(all_press), 'slamming': psl, 'slamming plate reduction factor': slm_red_pl,
'slamming stf reduction factor': slm_red_stf}
elif limit_state == 'FLS':
pass
else:
return {'normal':0, 'slamming': 0}
def get_fatigue_pressures(self, line, accelerations = (0, 0, 0)):
        ''' Returning a dictionary of internal and external pressures. '''
loaded_exist = False
ballast_exist = False
part_exist = False
for load in self._line_to_struc[line][3]:
if load.get_limit_state() == 'FLS':
if load.get_load_condition() == 'loaded':
loaded_exist = True
elif load.get_load_condition() == 'ballast':
ballast_exist = True
elif load.get_load_condition() == 'part':
part_exist = True
else:
pass
fls_exist = (loaded_exist, ballast_exist, part_exist)
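        # Flags for which FLS load conditions (loaded, ballast, part) are defined for this line.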
pressures = {}
pressures['p_ext'] = {'loaded': 0, 'ballast': 0, 'part': 0}
for load in self._line_to_struc[line][3]:
if load.get_limit_state() == 'FLS':
for exist_i in range(len(fls_exist)):
if fls_exist[exist_i] and load.get_load_condition()=='loaded':
pressures['p_ext']['loaded'] = load.get_calculated_pressure(self.get_pressures_calc_coord(line),
accelerations[0],
self._line_to_struc[line][
0].Plate.get_structure_type())
if fls_exist[exist_i] and load.get_load_condition() == 'ballast':
pressures['p_ext']['ballast'] = load.get_calculated_pressure(self.get_pressures_calc_coord(line),
accelerations[1],
self._line_to_struc[line][
0].Plate.get_structure_type())
if fls_exist[exist_i] and load.get_load_condition() == 'part':
pressures['p_ext']['part'] = load.get_calculated_pressure(self.get_pressures_calc_coord(line),
accelerations[2],
self._line_to_struc[line][
0].Plate.get_structure_type())
if self._tank_dict == {}:
compartments = []
else:
compartments = [self._tank_dict['comp'+str(tank)] for tank in self.get_compartments_for_line(line)]
pressures['p_int'] = {'loaded':0, 'ballast':0, 'part':0}
for comp in compartments:
if fls_exist[0] and comp.is_loaded_condition():
pressures['p_int']['loaded'] = comp.get_calculated_pressure(self.get_pressures_calc_coord(line),
accelerations[0])
if fls_exist[1] and comp.is_ballast_condition():
pressures['p_int']['ballast'] = comp.get_calculated_pressure(self.get_pressures_calc_coord(line),
accelerations[1])
if fls_exist[2] and any([comp.is_loaded_condition(),comp.is_ballast_condition()]):
pressures['p_int']['part'] = comp.get_calculated_pressure(self.get_pressures_calc_coord(line),
accelerations[2])*0.5
return pressures
def get_compartments_for_line(self, line):
'''
Finding the compartment connected to a specified line.
:return:
'''
start_point = self._point_dict['point' + str(self._line_dict[line][0])]
end_point = self._point_dict['point' + str(self._line_dict[line][1])]
mid_point = self._main_grid.get_mid_point(self.get_grid_coord_from_points_coords(start_point),
self.get_grid_coord_from_points_coords(end_point))
return list(filter(lambda x: x > 1, self._main_grid.get_adjacent_values(mid_point)))
def get_compartments_for_line_duplicates(self, line):
'''
Finding the compartment connected to a specified line.
:return:
'''
start_point = self._point_dict['point' + str(self._line_dict[line][0])]
end_point = self._point_dict['point' + str(self._line_dict[line][1])]
mid_point = self._main_grid.get_mid_point(self.get_grid_coord_from_points_coords(start_point),
self.get_grid_coord_from_points_coords(end_point))
return list(filter(lambda x: x > 1, self._main_grid.get_adjacent_values_duplicates(mid_point)))
def get_point_canvas_coord(self, point_no):
'''
Returning the canvas coordinates of the point. This value will change with slider.
'''
point_coord_x = self._canvas_draw_origo[0] + self._point_dict[point_no][0] * self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - self._point_dict[point_no][1] * self._canvas_scale
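        # The canvas y-axis points downwards, hence y is subtracted from the draw origin.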
return [point_coord_x, point_coord_y]
def get_point_actual_coord(self, point_no):
'''
        Returning the actual (real world) coordinates of a point.
'''
return [self._point_dict[point_no][0], self._point_dict[point_no][1]]
def get_actual_elevation_from_grid_coords(self,grid_col):
'''
Converts coordinates
:param canv_elevation:
:return:
'''
y_coord = (self._main_grid.get_grid_height() - grid_col)/self._base_scale_factor
return y_coord
def get_grid_coord_from_points_coords(self, point_coord):
'''
Converts coordinates to be used in the grid. Returns (row,col). This value will not change with slider.
:param point:
:return:
'''
row = self._canvas_base_origo[1] - point_coord[1]*self._base_scale_factor
col = point_coord[0]*self._base_scale_factor
return (row,col)
def get_point_coords_from_grid_coords(self, grid_coord):
'''
Converts coordinates to be used in the as points. Returns (x,y). This value will not change with slider.
:param point:
:return:
'''
x_coord = grid_coord[1]/self._base_scale_factor
y_coord = (self._main_grid.get_grid_height() - grid_coord[0])/self._base_scale_factor
return x_coord,y_coord
def get_canvas_coords_from_point_coords(self, actual_coords):
'''
Returns tuple of canvas points from actual (x,y)
:param actual_coords:
:return:
'''
canvas_coord_x = self._canvas_draw_origo[0] + actual_coords[0] * self._canvas_scale
canvas_coord_y = self._canvas_draw_origo[1] - actual_coords[1] * self._canvas_scale
return (canvas_coord_x, canvas_coord_y)
def get_line_low_elevation(self,line):
'''
Finding elevation of a line. Used to calculate pressures in load combinations.
:param line:
:return:
'''
return min([self._point_dict['point'+str(point)][1] for point in self._line_dict[line]])
def get_line_radial_mid(self,line):
'''
Getting the horizontal coordinates in the middle of a line.
:param line:
:return:
'''
return sum([self._point_dict['point' + str(point)][0] for point in self._line_dict[line]])/2
def get_pressures_calc_coord(self, line):
''' Returning coordinates of the pressures calculation basis of a selected line. '''
p1 = self._point_dict['point'+str(self._line_dict[line][0])]
p2 = self._point_dict['point'+str(self._line_dict[line][1])]
        if p1[1] < p2[1]:
start_point = p1
end_point = p2
elif p1[1] == p2[1]:
if p1[0] <= p2[0]:
start_point = p1
end_point = p2
else:
start_point = p2
end_point = p1
else:
start_point = p2
end_point = p1
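        # The pressure is evaluated one third along the line, measured from the lower (or leftmost) end point.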
vector = [end_point[0]-start_point[0], end_point[1]-start_point[1]]
return start_point[0]+vector[0]*1/3, start_point[1]+vector[1]*1/3
def get_points(self):
return self._point_dict
def get_closest_point(self,given_point):
'''
        Finding the closest point to a given point.
Real coordinates used (meters).
Returning point name, coordinates and distance.
:param coordx:
:param coordy:
:return:
'''
current_dist = float('inf')
current_point = None
for point,coords in self._point_dict.items():
if dist([coords[0],coords[1]], [given_point[0],given_point[1]]) < current_dist:
current_dist = dist([coords[0],coords[1]], [given_point[0],given_point[1]])
current_point = point
return current_point, self._point_dict[current_point], current_dist
def get_lines(self):
return self._line_dict
def get_unique_plates_and_beams(self):
beams, plates = list(), list()
if self._line_to_struc != {}:
for line, data in self._line_to_struc.items():
if data[0].Stiffener is not None:
this_beam = data[0].Stiffener.get_beam_string()
this_plate = data[0].Stiffener.get_pl_thk()*1000
if this_beam not in beams:
beams.append(this_beam)
if this_plate not in plates:
plates.append(this_plate)
return {'plates':plates, 'beams': beams}
def make_point_point_line_string(self, point1, point2):
'''
For a line, this method makes a string 'p1p2' and 'p2p1'. Ensuring that lines are not overwritten.
:param point1:
:param point2:
:return:
'''
return ['p' + str(point1) + 'p' + str(point2), 'p' + str(point2) + 'p' + str(point1)]
def reset(self):
'''
Resetting the script.
:return:
'''
self._line_dict = {}
self._point_dict = {}
self._line_to_struc = {}
self._line_point_to_point_string = []
self._load_dict = {}
self._new_load_comb_dict = {}
self._line_is_active = False
self._active_line = ''
self._point_is_active = False
self._active_point = ''
self.delete_all_tanks()
self._main_canvas.delete('all')
self._prop_canvas.delete('all')
self._result_canvas.delete('all')
self._pending_grid_draw = {}
self._p1_p2_select = False
self._line_is_active = False # True when a line is clicked
self._active_line = '' # Name of the clicked point
self._point_is_active = False # True when a point is clicked
self._active_point = '' # Name of the clicked point
self.controls() # Function to activate mouse clicks
self._line_point_to_point_string = [] # This one ensures that a line is not created on top of a line
self._accelerations_dict = {'static':9.81, 'dyn_loaded':0, 'dyn_ballast':0}
self._multiselect_lines = []
self._PULS_results = None
self.update_frame()
        # Initializing the calculation grid used for tank definition
self._main_grid = grid.Grid(self._grid_dimensions[0], self._grid_dimensions[1])
self._grid_calc = None
def controls(self):
'''
Specifying the controls to be used.
:return:
'''
self._main_canvas.bind('<Button-1>', self.button_1_click)
self._main_canvas.bind('<Button-2>', self.button_2_click)
self._main_canvas.bind('<Button-3>', self.button_3_click)
self._main_canvas.bind("<B2-Motion>", self.button_2_click_and_drag)
self._main_canvas.bind("<MouseWheel>", self.mouse_scroll)
#self._prop_canvas.bind("<MouseWheel>", self.mouse_scroll)
self._parent.bind('<Control-z>', self.undo)
#self._parent.bind('<Control-y>', self.redo)
#self._parent.bind('<Control-p>', self.delete_point)
self._parent.bind('<Control-l>', self.delete_line)
self._parent.bind('<Control-p>', self.copy_point)
self._parent.bind('<Control-m>', self.move_point)
self._parent.bind('<Control-n>', self.move_line)
self._parent.bind('<Control-a>', self.select_all_lines)
self._parent.bind('<Control-t>', self.select_all_lines)
self._parent.bind('<Control-q>', self.new_line)
self._parent.bind('<Control-s>', self.new_structure)
self._parent.bind('<Delete>', self.delete_key_pressed)
self._parent.bind('<Control-Delete>', self.delete_properties_pressed)
self._parent.bind('<Control-e>', self.copy_property)
self._parent.bind('<Control-d>', self.paste_property)
self._parent.bind('<Left>', self.left_arrow)
self._parent.bind('<Right>', self.right_arrow)
self._parent.bind('<Down>', self.up_arrow)
self._parent.bind('<Up>', self.down_arrow)
self._parent.bind("<Alt-s>", self.save_no_dialogue)
#self._parent.bind('<Enter>', self.enter_key_pressed)
def left_arrow(self, event):
if self._active_line == '':
return
else:
idx = list(self._line_dict.keys()).index(self._active_line)
if idx -1 >= 0:
self._active_line =list(self._line_dict.keys())[idx-1]
else:
self._active_line = list(self._line_dict.keys())[-1]
self.update_frame()
def right_arrow(self, event):
if self._active_line == '':
return
else:
idx = list(self._line_dict.keys()).index(self._active_line)
if idx + 1 < len(list(self._line_dict.keys())):
self._active_line = list(self._line_dict.keys())[idx+1]
else:
self._active_line = list(self._line_dict.keys())[0]
self.update_frame()
def up_arrow(self, event):
if self._active_point == '':
return
else:
idx = list(self._point_dict.keys()).index(self._active_point)
if idx - 1 >= 0:
self._active_point = list(self._point_dict.keys())[idx - 1]
else:
self._active_point = list(self._point_dict.keys())[-1]
self.update_frame()
def down_arrow(self, event):
if self._active_point == '':
return
else:
idx = list(self._point_dict.keys()).index(self._active_point)
if idx + 1 < len(list(self._point_dict.keys())):
self._active_point = list(self._point_dict.keys())[idx + 1]
else:
self._active_point = list(self._point_dict.keys())[0]
self.update_frame()
def select_all_lines(self, event=None):
if self._toggle_btn.config('relief')[-1] == "sunken":
for line in self._line_to_struc.keys():
if line not in self._multiselect_lines:
if event.keysym == 't':
if self._line_to_struc[line][0].Plate.get_structure_type() == self._new_stucture_type.get():
self._multiselect_lines.append(line)
else:
self._multiselect_lines.append(line)
else:
            tk.messagebox.showinfo('CTRL-A and CTRL-T', 'CTRL-A and CTRL-T are used to select all lines \n'
                                                         'with the intention to change a single variable in all lines.\n'
'Press the Toggle select multiple button.')
self.update_frame()
def mouse_scroll(self,event):
if event.y < self._main_canvas.winfo_height():
self._canvas_scale += event.delta/50
self._canvas_scale = 0 if self._canvas_scale < 0 else self._canvas_scale
else:
pass
try:
state = self.get_color_and_calc_state()
except AttributeError:
state = None
self.update_frame()
def button_2_click(self, event):
self._previous_drag_mouse = [event.x, event.y]
def button_2_click_and_drag(self,event):
self._canvas_draw_origo = (self._canvas_draw_origo[0]-(self._previous_drag_mouse[0]-event.x),
self._canvas_draw_origo[1]-(self._previous_drag_mouse[1]-event.y))
self._previous_drag_mouse = (event.x,event.y)
try:
state = self.get_color_and_calc_state()
except AttributeError:
state = None
self.update_frame()
#self.draw_canvas(state=state)
def button_1_click(self, event = None):
'''
        When clicking the left mouse button (Button-1), this method is called.
        It identifies which line, if any, was clicked and makes it the active line.
'''
self._previous_drag_mouse = [event.x, event.y]
click_x = self._main_canvas.winfo_pointerx() - self._main_canvas.winfo_rootx()
click_y = self._main_canvas.winfo_pointery() - self._main_canvas.winfo_rooty()
self._prop_canvas.delete('all')
stop = False
self._active_line = ''
self._line_is_active = False
if len(self._line_dict) > 0:
for key, value in self._line_dict.items():
if stop:
break
coord1x = self.get_point_canvas_coord('point' + str(value[0]))[0]
coord2x = self.get_point_canvas_coord('point' + str(value[1]))[0]
coord1y = self.get_point_canvas_coord('point' + str(value[0]))[1]
coord2y = self.get_point_canvas_coord('point' + str(value[1]))[1]
vector = [coord2x - coord1x, coord2y - coord1y]
click_x_range = [ix for ix in range(click_x - 10, click_x + 10)]
click_y_range = [iy for iy in range(click_y - 10, click_y + 10)]
distance = int(dist([coord1x, coord1y], [coord2x, coord2y]))
                # checking along the line if the click is within +- 10 pixels of the line
for dist_mult in range(1, distance - 1):
dist_mult = dist_mult / distance
x_check = int(coord1x) + int(round(vector[0] * dist_mult, 0))
y_check = int(coord1y) + int(round(vector[1] * dist_mult, 0))
if x_check in click_x_range and y_check in click_y_range:
self._line_is_active = True
self._active_line = key
stop = True
break
self._new_delete_line.set(get_num(key))
if self._line_is_active and self._active_line not in self._line_to_struc.keys():
p1 = self._point_dict['point'+str(self._line_dict[self._active_line][0])]
p2 = self._point_dict['point'+str(self._line_dict[self._active_line][1])]
self._new_field_len.set(dist(p1,p2)*1000)
if self._toggle_btn.config('relief')[-1] == 'sunken':
if self._active_line not in self._multiselect_lines:
self._multiselect_lines.append(self._active_line)
else:
self._multiselect_lines = []
try:
state = self.get_color_and_calc_state()
except AttributeError:
state = None
self.update_frame()
self._combination_slider.set(1)
if self._line_is_active:
self._tabControl.select(self._tab_prop)
try:
self.gui_load_combinations(self._combination_slider.get())
except (KeyError, AttributeError):
pass
self.cylinder_gui_mods()
def cylinder_gui_mods(self):
if self._active_line in self._line_to_struc.keys():
if self._line_to_struc[self._active_line][5] is not None:
self._new_calculation_domain.set(CylinderAndCurvedPlate
.geomeries[self._line_to_struc[self._active_line][5].geometry])
self._new_shell_exclude_ring_stf.set(self._line_to_struc[self._active_line][5]._ring_stiffener_excluded)
self._new_shell_exclude_ring_frame.set(self._line_to_struc[self._active_line][5]._ring_frame_excluded)
self.calculation_domain_selected()
            # Setting the correct optimization buttons
#'Flat plate, unstiffened', 'Flat plate, stiffened', 'Flat plate, stiffened with girder'
for dom in ['Flat plate, unstiffened', 'Flat plate, stiffened', 'Flat plate, stiffened with girder']:
for btn, placement in zip(self._optimization_buttons[dom],
self._optimization_buttons[dom + ' place']):
btn.place_forget()
for btn, placement in zip(self._optimization_buttons['cylinder'],
self._optimization_buttons['cylinder place']):
if self._gui_functional_look == 'cylinder':
placement = self._gui_functional_look_cylinder_opt
btn.place(relx = placement[0], rely= placement[1],relheight = placement[2], relwidth = placement[3])
else:
self._new_calculation_domain.set(self._line_to_struc[self._active_line][0].calculation_domain)
self.calculation_domain_selected()
dom = self._line_to_struc[self._active_line][0].calculation_domain
for btn, placement in zip(self._optimization_buttons['cylinder'],
self._optimization_buttons['cylinder place']):
btn.place_forget()
for btn, placement in zip(self._optimization_buttons[dom],
self._optimization_buttons[dom + ' place']):
btn.place(relx = placement[0], rely= placement[1],relheight = placement[2], relwidth = placement[3] )
def button_1_click_comp_box(self,event):
'''
Action when clicking the compartment box.
:param event:
:return:
'''
self._selected_tank.config(text='')
self._tank_acc_label.config(text='Accelerations [m/s^2]: ',font = self._text_size['Text 8 bold'])
if len(self._tank_dict)!=0:
current_comp = self._tank_dict['comp' + str(self._compartments_listbox.get('active'))]
self._selected_tank.config(text=str(self._compartments_listbox.get('active')))
self._new_density.set(self._tank_dict['comp' + str(self._compartments_listbox.get('active'))]
.get_density())
self._new_overpresure.set(self._tank_dict['comp' + str(self._compartments_listbox.get('active'))]
.get_overpressure())
self._new_content_type.set(self._tank_dict['comp' + str(self._compartments_listbox.get('active'))]
.get_content())
self._new_max_el.set(self._tank_dict['comp' + str(self._compartments_listbox.get('active'))]
.get_highest_elevation())
self._new_min_el.set(self._tank_dict['comp' + str(self._compartments_listbox.get('active'))]
.get_lowest_elevation())
acc = (self._tank_dict['comp' + str(self._compartments_listbox.get('active'))].get_accelerations())
self._tank_acc_label.config(text='Accelerations [m/s^2]: \n'
+'static: ' + str(acc[0])+' , '
+'dynamic loaded: ' + str(acc[1])+' , '
+'dynamic ballast: ' + str(acc[2]), font = self._text_size['Text 8 bold'])
def button_3_click(self, event = None):
'''
        Selects a point when clicking the right mouse button (Button-3).
:return:
'''
click_x = self._main_canvas.winfo_pointerx() - self._main_canvas.winfo_rootx()
click_y = self._main_canvas.winfo_pointery() - self._main_canvas.winfo_rooty()
self._pt_frame.place_forget()
self._point_is_active = False
margin = 10
self._active_point = ''
for point, coords in self._point_dict.items():
point_coord = self.get_point_canvas_coord(point)
if point_coord[0]-margin < click_x < point_coord[0]+margin and\
point_coord[1]-margin < click_y < point_coord[1]+margin:
self._active_point = point
self._point_is_active = True
self._new_delete_point.set(get_num(point))
if not self._p1_p2_select:
self._new_line_p1.set(get_num(point))
self._p1_p2_select = True
else:
self._new_line_p2.set(get_num(point))
self._p1_p2_select = False
self._new_point_x.set(round(self._point_dict[self._active_point][0]*1000, 1))
self._new_point_y.set(round(self._point_dict[self._active_point][1]*1000, 1))
if self._toggle_btn.config('relief')[-1] == 'sunken':
if len(self._multiselect_lines) != 0:
self._multiselect_lines.pop(-1)
self.update_frame()
def draw_point_frame(self):
''' Frame to define brackets on selected point. '''
pt_canvas = tk.Canvas(self._pt_frame,height=100,width=100,background=self._style.lookup('TFrame', 'background'))
pt_canvas.place(relx=0, rely=0)
pt_canvas.create_oval(45,45,55,55,fill='red')
new_left_br = tk.IntVar()
new_right_br = tk.IntVar()
new_upper_br = tk.IntVar()
new_lower_br = tk.IntVar()
wid = 5
ent_left = ttk.Entry(self._pt_frame,textvariable=new_left_br, width=wid,
)
ent_right = ttk.Entry(self._pt_frame, textvariable=new_right_br, width=wid,
)
ent_upper = ttk.Entry(self._pt_frame, textvariable=new_upper_br, width=wid,
)
ent_lower = ttk.Entry(self._pt_frame, textvariable=new_lower_br, width=wid,
)
ent_lower.place(relx=0.018229167, rely=0.009259259)
ent_upper.place(relx=0.018229167, rely=0.069444444)
ent_left.place(relx=0.002604167, rely=0.037037037)
ent_right.place(relx=0.03125, rely=0.037037037)
def save_no_dialogue(self, event = None, backup = False):
if backup:
self.savefile(filename=os.path.join(self._root_dir, '../backup.txt'), backup = backup)
return
if self.__last_save_file is not None:
self.savefile(filename=self.__last_save_file)
else:
tk.messagebox.showerror('Save error', 'No saves in this session yet.')
def savefile(self, filename = None, backup = False):
'''
Saving to a file using JSON formatting.
'''
if filename is None:
save_file = filedialog.asksaveasfile(mode="w", defaultextension=".txt")
            if save_file is None:  # asksaveasfile returns `None` if the dialog is closed with "cancel".
return
if not backup:
self.__last_save_file = save_file.name
else:
try:
save_file = open(filename, mode='w')
            except FileNotFoundError:
                # The target directory does not exist; notify the user instead of retrying the same path.
                tk.messagebox.showerror('Save error', 'Could not save to ' + str(filename))
                return
structure_properties = {}
shell_structure_properties = {}
for key, value in self._line_to_struc.items():
structure_properties[key] = value[0].get_main_properties()
shell_structure_properties[key] = None if value[5] is None else value[5].get_all_properties()
fatigue_properties = {}
for key, value in self._line_to_struc.items():
if value[2] != None:
try:
fatigue_properties[key] = value[2].get_fatigue_properties()
except AttributeError:
fatigue_properties[key] = None
else:
fatigue_properties[key] = None
load_properties = {}
for load, data in self._load_dict.items():
load_properties[load] = [data[0].get_load_parmeters(), data[1]]
tank_properties = {}
tank_properties['grid'] = self._main_grid.export_grid()
tank_properties['search_data'] = self._main_grid.bfs_search_data
for tank,data in self._tank_dict.items():
tank_properties[tank] = data.get_parameters()
        load_combinations = {}
        counter = 0
        for name, data in self._new_load_comb_dict.items():
            load_combinations[counter] = [name, data[0].get(), data[1].get(), data[2].get()]
            counter += 1
export_all = {}
export_all['project information'] = self._project_information.get('1.0', tk.END)
export_all['theme'] = self._current_theme
export_all['point_dict'] = self._point_dict
export_all['line_dict'] = self._line_dict
export_all['structure_properties'] = structure_properties
export_all['shell structure properties'] = shell_structure_properties
export_all['load_properties'] = load_properties
export_all['accelerations_dict'] = self._accelerations_dict
        export_all['load_combinations'] = load_combinations
export_all['tank_properties'] = tank_properties
export_all['fatigue_properties'] = fatigue_properties
#export_all['buckling type'] = self._new_buckling_slider.get()
export_all['buckling method'] = self._new_buckling_method.get()
if self._PULS_results is not None:
export_all['PULS results'] = self._PULS_results.get_run_results()
export_all['PULS results']['sheet location'] = self._PULS_results.puls_sheet_location
export_all['shifting'] = {'shifted checked': self._new_shifted_coords.get(),
'shift hor': self._new_shift_viz_coord_hor.get(),
'shift ver': self._new_shift_viz_coord_ver.get()}
export_all['Weight and COG'] = self._weight_logger
json.dump(export_all, save_file)#, sort_keys=True, indent=4)
save_file.close()
if not backup:
self._parent.wm_title('| ANYstructure | ' + save_file.name)
#self.update_frame()
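        # The saved file is a single JSON dictionary; the top-level keys written above are
        # 'project information', 'theme', 'point_dict', 'line_dict', 'structure_properties',
        # 'shell structure properties', 'load_properties', 'accelerations_dict', 'load_combinations',
        # 'tank_properties', 'fatigue_properties', 'buckling method', 'shifting', 'Weight and COG'
        # and, when PULS results exist, 'PULS results'. openfile() below reads the same keys back.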
def openfile(self, defined = None, alone = False):
'''
Opens a file with data (JSON).
'''
        if defined is None:
            imp_file = filedialog.askopenfile(mode='r', defaultextension=".txt")
            if imp_file is None:  # askopenfile returns `None` if the dialog is closed with "cancel".
return
else:
imp_file = open(defined,'r')
imported = json.load(imp_file)
self.reset()
if 'project information' in imported.keys():
self._project_information.delete("1.0", tk.END)
self._project_information.insert(1.0, imported['project information'])
else:
self._project_information.delete("1.0", tk.END)
self._project_information.insert(1.0, 'No information on project provided. Input here.')
if 'shifting' in imported.keys():
self._new_shifted_coords.set(imported['shifting']['shifted checked'])
self._new_shift_viz_coord_hor.set(imported['shifting']['shift hor'])
self._new_shift_viz_coord_ver.set(imported['shifting']['shift ver'])
else:
pass
if 'theme' in imported.keys():
self.set_colors(imported['theme'])
self._point_dict = imported['point_dict']
self._line_dict = imported['line_dict']
struc_prop = imported['structure_properties']
old_save_file = False
for line, lines_prop in struc_prop.items():
if len(lines_prop) > 10:
# Loading a file (pre 3.4)
old_save_file = True
self._line_to_struc[line] = [None, None, None, [], {}, None]
self._line_point_to_point_string.append(
self.make_point_point_line_string(self._line_dict[line][0], self._line_dict[line][1])[0])
self._line_point_to_point_string.append(
self.make_point_point_line_string(self._line_dict[line][0], self._line_dict[line][1])[1])
if 'structure_types' not in lines_prop.keys():
lines_prop['structure_types'] = [self._structure_types, ' ']
if 'zstar_optimization' not in lines_prop.keys():
lines_prop['zstar_optimization'] = [self._new_zstar_optimization.get(), '']
if 'puls buckling method' not in lines_prop.keys():
lines_prop['puls buckling method'] = [self._new_puls_method.get(), '']
if 'puls boundary' not in lines_prop.keys():
lines_prop['puls boundary'] = [self._new_puls_panel_boundary.get(), '']
if 'puls stiffener end' not in lines_prop.keys():
lines_prop['puls stiffener end'] = [self._new_buckling_stf_end_support.get(), '']
if 'puls sp or up' not in lines_prop.keys():
lines_prop['puls sp or up'] = [self._new_puls_sp_or_up.get(), '']
if 'puls up boundary' not in lines_prop.keys():
lines_prop['puls up boundary'] = [self._new_puls_up_boundary.get(), '']
if 'mat_factor' not in lines_prop.keys():
lines_prop['mat_factor'] = [self._new_material_factor.get(), '']
# Sigma x1/x2 is missing before 3.4
if 'sigma_x' in lines_prop.keys():
lines_prop['sigma_x1'] = lines_prop['sigma_x']
lines_prop['sigma_x2'] = lines_prop['sigma_x']
lines_prop.pop('sigma_x')
if old_save_file: #need to get some basic information
# Import issues
try:
import example_data as ex
except ModuleNotFoundError:
# This is due to pyinstaller issues.
import any_files.example_data as ex
#import ANYstructure.any_files.example_data as ex
main_dict = ex.prescriptive_main_dict
map_end = {'C': 'Continuous', 'S': 'Sniped'}
lines_prop['puls stiffener end'] = [map_end[lines_prop['puls stiffener end'][0]],
lines_prop['puls stiffener end'][1]]
main_dict['material yield'] = [355e6, 'Pa']
main_dict['load factor on stresses'] = [1, '']
main_dict['load factor on pressure'] = [1, '']
main_dict['buckling method'] = [lines_prop['puls buckling method'], '']
main_dict['stiffener end support'] = lines_prop['puls stiffener end'] # 'Continuous'
main_dict['girder end support'] = ['Continuous', ''] # 'Continuous'
dom = 'Flat plate, stiffened' if lines_prop['puls sp or up'][0] == 'SP' else 'Flat plate, unstiffened'
main_dict['calculation domain'] = [dom, '']
map_side = {'p': 'plate side', 's': 'stiffener side'}
if 'press_side' in lines_prop.keys():
lines_prop['press_side'] = [map_side[lines_prop['press_side'][0]], '']
else:
lines_prop['press_side'] = 'both sides'
lines_prop['panel or shell'] = 'panel'
#lines_prop['tension field'] = 'allowed'
self._line_to_struc[line][0] = AllStructure(Plate=CalcScantlings(lines_prop),
Stiffener=None if dom == 'Flat plate, unstiffened'
else CalcScantlings(lines_prop),
Girder=None, main_dict=main_dict)
if imported['fatigue_properties'][line] is not None:
self._line_to_struc[line][2] = CalcFatigue(lines_prop,
imported['fatigue_properties'][line])
else:
self._line_to_struc[line][2] = None
# Recording sections.
self._sections = add_new_section(self._sections, struc.Section(lines_prop))
else:
self._line_to_struc[line][0] = AllStructure(Plate=None if lines_prop['Plate'] is None
else CalcScantlings(lines_prop['Plate']),
Stiffener=None if lines_prop['Stiffener'] is None
else CalcScantlings(lines_prop['Stiffener']),
Girder=None if lines_prop['Girder'] is None
else CalcScantlings(lines_prop['Girder']),
main_dict=lines_prop['main dict'])
if imported['fatigue_properties'][line] is not None:
self._line_to_struc[line][2] = CalcFatigue(lines_prop['Stiffener'],
imported['fatigue_properties'][line])
else:
self._line_to_struc[line][2] = None
# Recording sections.
if self._line_to_struc[line][0].Stiffener is not None:
self._sections = add_new_section(self._sections, struc.Section(lines_prop['Stiffener']))
if 'shell structure properties' in imported.keys():
if imported['shell structure properties'][line] is not None:
                    # need to correct the calculation domain.
#self._new_calculation_domain.set(imported_dict['Main class'][CylinderAndCurvedPlate.geomeries])
imported_dict = imported['shell structure properties'][line]
'''
all_data = {'Main class': self.get_main_properties(),
'Shell': self._Shell.get_main_properties(),
'Long. stf.': self._LongStf.get_structure_prop(),
'Ring stf.': self.RingStfObj.get_structure_prop(),
'Ring frame': self._RingFrame.get_structure_prop()}
'''
                    for struct_type in ['Long. stf.', 'Ring stf.', 'Ring frame']:
                        if imported_dict[struct_type] is not None:
                            if 'sigma_x' in imported_dict[struct_type].keys():
                                imported_dict[struct_type]['sigma_x1'] = imported_dict[struct_type]['sigma_x']
                                imported_dict[struct_type]['sigma_x2'] = imported_dict[struct_type]['sigma_x']
                                imported_dict[struct_type].pop('sigma_x')
self._line_to_struc[line][5] = \
CylinderAndCurvedPlate(imported_dict['Main class'], shell=None if imported_dict['Shell'] is None
else Shell(imported_dict['Shell']), long_stf=None if imported_dict['Long. stf.'] is None
else Structure(imported_dict['Long. stf.']), ring_stf=None if imported_dict['Ring stf.'] is None
else Structure(imported_dict['Ring stf.']), ring_frame=None if imported_dict['Ring frame']
is None
else Structure(imported_dict['Ring frame']))
# opening the loads
variables = ['poly_third','poly_second', 'poly_first', 'poly_const', 'load_condition',
'structure_type', 'man_press', 'static_draft', 'name_of_load', 'limit_state',
'slamming mult pl', 'slamming mult stf']
if len(imported['load_properties']) != 0:
for load, data in imported['load_properties'].items():
temp_dict = {}
count_i = 0
values = data[0]
if len(values) != len(variables):
# Adding slamming multiplication factors
values.append(1)
values.append(1)
for value in values:
temp_dict[variables[count_i]]= value
count_i += 1
self._load_dict[load] = [Loads(temp_dict), data[1]]
if len(data[1]) != 0:
for main_line in self._line_dict.keys():
if main_line in data[1]:
self._line_to_struc[main_line][3].append(self._load_dict[load][0])
try:
self._accelerations_dict = imported['accelerations_dict']
        except KeyError:
self._accelerations_dict = {'static':9.81, 'dyn_loaded':0, 'dyn_ballast':0}
self._new_static_acc.set(self._accelerations_dict['static'])
self._new_dyn_acc_loaded.set(self._accelerations_dict['dyn_loaded'])
self._new_dyn_acc_ballast.set(self._accelerations_dict['dyn_ballast'])
try:
for data in imported['load_combinations'].values():
name = tuple(data[0])
self._new_load_comb_dict[name] = [tk.DoubleVar(),tk.DoubleVar(),tk.IntVar()]
self._new_load_comb_dict[name][0].set(data[1]), self._new_load_comb_dict[name][1].set(data[2])
self._new_load_comb_dict[name][2].set(data[3])
except IndexError:
for data in imported['load_combinations'].values():
name = tuple(data[0])
self._new_load_comb_dict[name] = [tk.DoubleVar(),tk.IntVar()]
self._new_load_comb_dict[name][0].set(data[1]), self._new_load_comb_dict[name][1].set(data[2])
try:
self._main_grid.import_grid(imported['tank_properties']['grid'])
self._grid_calc = grid_window.CreateGridWindow(self._main_grid, self._canvas_dim,
self._pending_grid_draw, self._canvas_base_origo)
tank_inp = dict()
if 'search_data' in imported['tank_properties'].keys():
try:
for key, value in imported['tank_properties']['search_data'].items():
tank_inp[int(key)] = value
self._main_grid.bfs_search_data = tank_inp
self._grid_calc.bfs_search_data = tank_inp
except AttributeError:
self._main_grid.bfs_search_data = None
self._grid_calc.bfs_search_data = None
else:
self._main_grid.bfs_search_data = None
self._grid_calc.bfs_search_data = None
for comp_no in range(2, int(self._main_grid.get_highest_number_in_grid())+1):
self._compartments_listbox.insert('end',comp_no)
self._tank_dict['comp' + str(comp_no)] = Tanks(imported['tank_properties']['comp' + str(comp_no)])
        except (KeyError, IndexError):
for line_name, point_no in self._line_dict.items():
point_coord_x = self._canvas_base_origo[0] + self._point_dict[point_no][0] * self._canvas_scale
point_coord_y = self._canvas_base_origo[1] - self._point_dict[point_no][1] * self._canvas_scale
self.grid_operations(line_name, [point_coord_x,point_coord_y])
if 'PULS results' in list(imported.keys()):
self._PULS_results = PULSpanel()
if 'sheet location' in imported['PULS results'].keys():
self._PULS_results.puls_sheet_location = imported['PULS results']['sheet location']
imported['PULS results'].pop('sheet location')
self._PULS_results.set_run_results(imported['PULS results'])
if 'buckling method' in list(imported.keys()):
#options = ['DNV-RP-C201 - prescriptive', 'DNV PULS', 'ML-CL (PULS based)']
self._new_buckling_method.set(imported['buckling method'])
# Setting the scale of the canvas
points = self._point_dict
if len(points) != 0:
highest_y = max([coord[1] for coord in points.values()])
highest_x = max([coord[0] for coord in points.values()])
else:
highest_x = 1
highest_y = 1
if not any([highest_x == 0, highest_y == 0]):
self._canvas_scale = min(800 / highest_y, 800 / highest_x, 15)
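            # Fit the model into roughly 800 canvas pixels in each direction, capped at
            # 15 pixels per model unit for small models.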
# if 'buckling type' in imported.keys():
# self._new_buckling_slider.set(imported['buckling type'])
# self._buckling_slider.set(imported['buckling type'])
if 'Weight and COG' in imported.keys():
self._weight_logger = imported['Weight and COG']
self.get_cob()
imp_file.close()
self._parent.wm_title('| ANYstructure | ' + imp_file.name)
self.update_frame()
def restore_previous(self):
if os.path.isfile(os.path.join(self._root_dir, '../backup.txt')):
self.openfile(defined=os.path.join(self._root_dir, '../backup.txt'))
def open_example(self, file_name = 'ship_section_example.txt'):
''' Open the example file. To be used in help menu. '''
if os.path.isfile(file_name) :
self.openfile(defined = file_name)
else:
self.openfile(defined= self._root_dir + '/' + file_name)
def button_load_info_click(self, event = None):
''' Get the load information for one line.'''
if self._active_line != '' and self._active_line in self._line_to_struc.keys():
load_text = self.calculate_all_load_combinations_for_line(self._active_line, get_load_info=True)
text_to_frame = 'Load results for ' + self._active_line + '\n' + '\n'
for item in load_text:
text_to_frame += item
text_m = tk.Toplevel(self._parent, background=self._general_color)
# Create the text widget
text_widget = tk.Text(text_m, height=60, width=80)
# Create a scrollbar
scroll_bar = ttk.Scrollbar(text_m)
# Pack the scroll bar
# Place it to the right side, using tk.RIGHT
scroll_bar.pack(side=tk.RIGHT)
# Pack it into our tkinter application
# Place the text widget to the left side
text_widget.pack(side=tk.LEFT)
# Insert text into the text widget
text_widget.insert(tk.END, text_to_frame)
#tk.messagebox.showinfo('Load info for '+self._active_line, ''.join(load_text))
else:
tk.messagebox.showerror('No data', 'No load data for this line')
def on_plot_cog_dev(self):
'''
Plot the COG and COB development.
'''
if self._weight_logger['new structure']['time'] == []:
            tk.messagebox.showinfo('New functionality ver. 3.3', 'If you are using an existing model,'
                                                                 ' weights have not been'
                                                                 ' recorded in previous versions.\n'
                                                                 'Press the "Add structure properties to line....." button to add a '
                                                                 'blank datapoint.\n'
                                                                 'Other data will then be available.\n\n'
                                                                 'If you are making a new model, add some structure properties.')
return
import matplotlib.dates as mdate
cog = np.array(self._weight_logger['new structure']['COG'])
weight = np.array(self._weight_logger['new structure']['weight'])/\
max(self._weight_logger['new structure']['weight'])
time_stamp = np.array(self._weight_logger['new structure']['time'])
time_stamp = [mdate.epoch2num(val) for val in time_stamp]
structure = self.get_unique_plates_and_beams()
hlp.plot_weights(time_stamp=time_stamp, cog=cog,structure=structure,weight=weight)
def on_open_structure_window(self, clicked_button = None):
'''
Opens the window to create structure.
:return:
'''
self._clicked_section_create = clicked_button # Identifying the clicked button
top_opt = tk.Toplevel(self._parent, background=self._general_color)
struc.CreateStructureWindow(top_opt, self)
def on_open_stresses_window(self):
'''
        Open a new window to define global stresses for the line.
:return:
'''
if self._line_is_active:
top_opt = tk.Toplevel(self._parent, background=self._general_color)
stress.CreateStressesWindow(top_opt, self)
else:
messagebox.showinfo(title='Select line',message='You must select a line')
def on_open_fatigue_window(self):
'''
        Open a new window to define fatigue properties for the line.
:return:
'''
if self._line_is_active:
try:
self._line_to_struc[self._active_line]
except KeyError:
messagebox.showinfo(title='Select line', message='Fatigue properties are defined here.\n'
                                                                 'Structure must be added to the line before setting\n'
'these properties ("Add structure to line"-button).')
return
top_opt = tk.Toplevel(self._parent, background=self._general_color)
fatigue.CreateFatigueWindow(top_opt, self)
else:
messagebox.showinfo(title='Select line',message='You must select a line')
def on_open_load_factor_window(self):
'''
Set the default load factors and change all.
:return:
'''
lf_tkinter = tk.Toplevel(self._parent, background=self._general_color)
load_factors.CreateLoadFactorWindow(lf_tkinter, self)
def on_puls_results_for_line(self):
if not self._line_is_active:
return
if self._PULS_results is None:
return
elif self._PULS_results.get_puls_line_results(self._active_line) is None:
return
# if self._puls_information_button.config('relief')[-1] == 'sunken':
# self.text_widget.forget()
# self._puls_information_button.config(relief='raised')
this_result = self._PULS_results.get_puls_line_results(self._active_line)
this_string = ''
for key, value in this_result.items():
if type(value) == list:
this_string += key + ' : ' + str(value[0]) + ' ' + str(value[1]) + '\n'
elif type(value) == str:
this_string += key + ' : ' + value + '\n'
elif type(value) == dict:
this_string += key + '\n'
for subk, subv in value.items():
this_string += ' ' + subk + ' : ' + str(subv[0]) + ' ' + str(subv[1] if subv[1] != None else '') + '\n'
text_m = tk.Toplevel(self._parent, background=self._general_color)
# Create the text widget
text_widget = tk.Text(text_m , height=60, width=100)
# Create a scrollbar
scroll_bar = ttk.Scrollbar(text_m)
# Pack the scroll bar
# Place it to the right side, using tk.RIGHT
scroll_bar.pack(side=tk.RIGHT)
# Pack it into our tkinter application
# Place the text widget to the left side
text_widget.pack(side=tk.LEFT)
long_text = this_string
# Insert text into the text widget
text_widget.insert(tk.END, long_text)
def on_show_loads(self):
'''
User can open a new window to specify loads
:return:
'''
try:
img_file_name = 'img_ext_pressure_button_def.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._ext_button.config(image = photo)
self._ext_button.image = photo
except TclError:
pass
self.__previous_load_data = copy.deepcopy(self._load_dict)
top = tk.Toplevel(self._parent, background=self._general_color)
load_window.CreateLoadWindow(top, self)
def on_optimize(self):
'''
User open window to optimize current structure
:return:
'''
# if [self.get_highest_pressure(line)['normal'] for line in self._line_to_struc.keys()] == []:
# # messagebox.showinfo(title='Missing something', message='Missing properties/loads etc.')
# # return
try:
self.get_highest_pressure(self._active_line)['normal']
except (KeyError, AttributeError):
messagebox.showinfo(title='Missing loads/accelerations',
message='Select line or make some loads for the line.\n'+
'Define accelerations for compartments.')
return
if self._line_is_active:
if self._active_line not in self._line_to_struc:
messagebox.showinfo(title='Missing properties', message='Specify properties for line')
elif self._line_to_struc[self._active_line][3] == None:
messagebox.showinfo(title='Missing loads', message='Make some loads for the line')
else:
top_opt = tk.Toplevel(self._parent, background=self._general_color)
opw.CreateOptimizeWindow(top_opt, self)
else:
messagebox.showinfo(title='Select line',message='You must select a line')
def on_optimize_cylinder(self):
'''
User open window to optimize current structure
:return:
'''
# if [self.get_highest_pressure(line)['normal'] for line in self._line_to_struc.keys()] == []:
# # messagebox.showinfo(title='Missing something', message='Missing properties/loads etc.')
# # return
if self._line_is_active:
if self._active_line not in self._line_to_struc:
messagebox.showinfo(title='Missing properties', message='Specify properties for line')
elif self._line_to_struc[self._active_line][5] == None:
messagebox.showinfo(title='Missing cylinder', message='Make a shell or panel')
else:
top_opt = tk.Toplevel(self._parent, background=self._general_color)
opc.CreateOptimizeCylinderWindow(top_opt, self)
else:
messagebox.showinfo(title='Select line',message='You must select a line')
def on_optimize_multiple(self):
'''
Used to optimize in batch mode.
:return:
'''
if [self.get_highest_pressure(line)['normal'] for line in self._line_to_struc.keys()] == []:
messagebox.showinfo(title='Missing something', message='Make something')
return
try:
[self.get_highest_pressure(line)['normal'] for line in self._line_to_struc.keys()]
except KeyError:
messagebox.showinfo(title='Missing loads', message='The MultiOpt requires that loads have been defined.\n')
return
messagebox.showinfo(title='Multiple optimization information',
message='Opening this window enables batch optimization.\n'
                                    'There is less input and information shown. It is HIGHLY\n'
                                    'recommended to single optimize first (optimize button).\n'
                                    'This way you will understand how the optimizer works.\n'
                                    '\n'
                                    'A default range of T properties is chosen. Typical analysis\n'
                                    'steps (deltas) are chosen.')
top_opt = tk.Toplevel(self._parent, background=self._general_color)
opwmult.CreateOptimizeMultipleWindow(top_opt,self)
def on_geometry_optimize(self):
'''
:param returned_objects:
:return:
'''
if [self.get_highest_pressure(line)['normal'] for line in self._line_to_struc.keys()] == []:
messagebox.showinfo(title='Missing something', message='Make something')
return
try:
[self.get_highest_pressure(line)['normal'] for line in self._line_to_struc.keys()]
except KeyError:
messagebox.showinfo(title='Missing loads', message='The SpanOpt requires that loads have been defined.\n')
return
messagebox.showinfo(title='Span optimization module', message =
'Computationally heavy! Will run for a long time.\n'
'It is HIGHLY recommended to run predefined stiffeners. \n\n'
'WEIGHT INDEX is the most important result.\n'
'Results are presented for information and can not be returned to main model.\n'
'Weight index will show you the span length that will give the lowest weight.\n'
'\n'
'A default range of T properties is chosen. Typical analysis\n'
                            'steps (deltas) are chosen.\n'
'Loads are taken from existing structure.')
top_opt = tk.Toplevel(self._parent, background=self._general_color)
optgeo.CreateOptGeoWindow(top_opt,self)
def on_close_load_window(self, returned_loads, counter, load_comb_dict):
'''
Setting properties created in load window.
:return:
'''
self.save_no_dialogue(backup=True) # keeping a backup
try:
img_file_name = 'img_ext_pressure_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._ext_button.config(image = photo)
self._ext_button.image = photo
except TclError:
pass
self._load_window_couter = counter
self._new_load_comb_dict = load_comb_dict
temp_load = self.__previous_load_data
if len(returned_loads) != 0:
need_to_recalc_puls = {}
for load, data in returned_loads.items():
#creating the loads objects dictionary
self._load_dict[load] = data
# adding values to the line dictionary. resetting first.
for key, value in self._line_to_struc.items():
self._line_to_struc[key][3] = []
self._line_to_struc[key][0].need_recalc = True # All lines need recalculations.
for main_line in self._line_dict.keys():
for load_obj, load_line in self._load_dict.values():
if main_line in self._line_to_struc.keys():
if load_obj.get_name() in temp_load.keys():
if any([load_obj.__str__() != temp_load[load_obj.get_name()][0].__str__() and main_line in \
load_line+temp_load[load_obj.get_name()][1],
main_line in list(set(temp_load[load_obj.get_name()][1]).symmetric_difference(set(load_line)))]) :
# The load has changed for this line.
if self._PULS_results is not None:
self._PULS_results.result_changed(main_line)
elif main_line in load_line:
# This is a new load for this line.
if self._PULS_results is not None:
self._PULS_results.result_changed(main_line)
if main_line in load_line and main_line in self._line_to_struc.keys():
self._line_to_struc[main_line][3].append(load_obj)
        # Storing the returned data in a temporary variable.
self.__returned_load_data = [returned_loads, counter, load_comb_dict]
# Calculating center of buoyancy from static cases.
if self._grid_calc is not None:
self.get_cob() # Update COB
self.update_frame()
def on_close_opt_window(self,returned_object):
'''
Sets the returned properties.
:param returned_structure:
:return:
'''
self.save_no_dialogue(backup=True) # keeping a backup
self.new_structure(multi_return = returned_object[0:2])
# self._line_to_struc[self._active_line][1]=returned_objects[0]
# self._line_to_struc[self._active_line][1]=returned_objects[1]
# self._line_to_struc[self._active_line][0].need_recalc = True
# self.set_selected_variables(self._active_line)
# if returned_objects[2] is not None:
# self._line_to_struc[self._active_line][2] = CalcFatigue(returned_objects[0].get_structure_prop(),
# returned_objects[2])
# self.new_structure()
self.update_frame()
def on_close_opt_cyl_window(self,returned_object):
'''
Sets the returned properties.
:param returned_structure:
:return:
'''
self.new_structure(cylinder_return = returned_object[0])
self.update_frame()
def on_close_opt_multiple_window(self, returned_objects):
'''
Sets the returned properties.
:param returned_structure:
:return:
'''
self.save_no_dialogue(backup=True) # keeping a backup
for line,all_objs in returned_objects.items():
self._active_line = line
#self._line_to_struc[line][0].need_recalc = True
self.new_structure(multi_return= all_objs[0:2])
self.update_frame()
def on_close_structure_window(self,returned_structure):
'''
Setting the input field to specified properties
:param returned_structure:
:return:
self._shell_ring_stf_gui_items = [self._lab_shell_ring_stiffener,self._ent_shell_ring_stf_hw,
self._ent_shell_ring_stf_tw,self._ent_shell_ring_stf_b,
self._ent_shell_ring_stf_tf, self._ent_shell_ring_stf_tripping_brackets,
self._ent_shell_ring_stf_type, self._chk_shell_ring_frame_exclude,
self._btn_shell_stf_section_ring_stf]
'''
clicked_button = returned_structure[7] #["long stf", "ring stf", "ring frame", "flat long stf", 'flat stf', 'flat girder']
if clicked_button in ["long stf", "flat long stf", 'flat stf']:
self._new_stf_spacing.set(returned_structure[0])
self._new_plate_thk.set(returned_structure[1])
self._new_stf_web_h.set(returned_structure[2])
self._new_stf_web_t.set(returned_structure[3])
self._new_stf_fl_w.set(returned_structure[4])
self._new_stf_fl_t.set(returned_structure[5])
self._new_stf_type.set(returned_structure[6])
elif clicked_button == 'flat girder':
self._new_girder_web_h.set(returned_structure[2])
self._new_girder_web_t.set(returned_structure[3])
self._new_girder_fl_w.set(returned_structure[4])
self._new_girder_fl_t.set(returned_structure[5])
self._new_girder_type.set(returned_structure[6])
elif clicked_button == "ring stf":
self._new_shell_ring_stf_hw.set(returned_structure[2])
self._new_shell_ring_stf_tw.set(returned_structure[3])
self._new_shell_ring_stf_b.set(returned_structure[4])
self._new_shell_ring_stf_tf.set(returned_structure[5])
elif clicked_button == "ring frame":
self._new_shell_ring_frame_hw.set(returned_structure[2])
self._new_shell_ring_frame_tw.set(returned_structure[3])
self._new_shell_ring_frame_b.set(returned_structure[4])
self._new_shell_ring_frame_tf.set(returned_structure[5])
section = struc.Section({'stf_type': returned_structure[6],
'stf_web_height': returned_structure[2]/1000,
'stf_web_thk': returned_structure[3]/1000,
'stf_flange_width': returned_structure[4]/1000,
'stf_flange_thk': returned_structure[5]/1000})
self._sections = add_new_section(self._sections, section)
def on_close_stresses_window(self,returned_stress_and_km):
'''
Sets the returned transverse/axial/shear stresses (global estimated values).
        Sets the km1, km2, km3 parameters.
:param returned_stress_and_km:
:return:
'''
self._new_sigma_y1.set(returned_stress_and_km[0])
self._new_sigma_y2.set(returned_stress_and_km[1])
self._new_sigma_x1.set(returned_stress_and_km[2])
self._new_sigma_x2.set(returned_stress_and_km[3])
self._new_tauxy.set(returned_stress_and_km[4])
        self._new_stf_km1.set(returned_stress_and_km[5])
        self._new_stf_km2.set(returned_stress_and_km[6])
        self._new_stf_km3.set(returned_stress_and_km[7])
self._new_plate_kpp.set(returned_stress_and_km[8])
self._new_stf_kps.set(returned_stress_and_km[9])
        self._new_stucture_type.set(returned_stress_and_km[10])
def on_close_fatigue_window(self,returned_fatigue_prop: dict):
'''
        Sets the returned fatigue properties.
:param returned_stress_and_km:
:return:
'''
if self._line_to_struc[self._active_line][2] == None:
self._line_to_struc[self._active_line][2] = CalcFatigue(self._line_to_struc[self._active_line][0].Plate
.get_structure_prop(),
returned_fatigue_prop)
else:
self._line_to_struc[self._active_line][2].set_fatigue_properties(returned_fatigue_prop)
self._line_to_struc[self._active_line][0].need_recalc = True
if self.__returned_load_data is not None:
            # map() is lazy and would never execute the handler; call it directly with the stored arguments.
            self.on_close_load_window(*self.__returned_load_data)
# adding values to the line dictionary. resetting first.
for key, value in self._line_to_struc.items():
if self._line_to_struc[key][2] is not None:
self._line_to_struc[key][2].set_commmon_properties(returned_fatigue_prop)
self._line_to_struc[key][0].need_recalc = True # All lines need recalculations.
self.update_frame()
def on_aborted_load_window(self):
'''
When it is aborted due to closing.
:return:
'''
try:
img_file_name = 'img_ext_pressure_button.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
self._ext_button.config(image = photo)
self._ext_button.image = photo
except TclError:
pass
def on_close_load_factor_window(self, returned_load_factors):
'''
self._load_factors_dict = {'dnva':[1.3,1.2,0.7], 'dnvb':[1,1,1.3], 'tanktest':[1,1,0]} # DNV loads factors
self._new_load_comb_dict = {(dnv cond, line, load type) : (stat lf, dyn lf, include)}
:param returned_load_factors: list [stat lf, dyn lf]
:return:
'''
self._load_factors_dict = returned_load_factors['returned lf dict']
for name, data in self._new_load_comb_dict.items():
if name[0] == 'manual':
continue
if data[0].get() != 0:
data[0].set(self._load_factors_dict[name[0]][1])
if data[1].get() != 0:
data[1].set(self._load_factors_dict[name[0]][2])
def close_main_window(self):
'''
        Save or not save when closing the window.
:return:
'''
mess = tk.messagebox.showwarning('Close main window', 'Save before closing?',type = 'yesnocancel')
self.save_no_dialogue(backup=True) # keeping a backup
if mess == 'yes':
self.savefile()
self._parent.destroy()
elif mess == 'no':
self._parent.destroy()
elif mess == 'cancel':
pass
def on_color_code_check(self, event = None):
if [self._new_colorcode_beams.get(), self._new_colorcode_plates.get(),
self._new_colorcode_pressure.get(), self._new_colorcode_utilization.get(),
self._new_colorcode_sigmax.get(), self._new_colorcode_sigmay1.get(), self._new_colorcode_sigmay2.get(),
self._new_colorcode_tauxy.get(), self._new_colorcode_structure_type.get(),
self._new_colorcode_section_modulus.get(), self._new_colorcode_fatigue.get(),
self._new_colorcode_total.get(), self._new_colorcode_puls_sp_or_up.get(),
self._new_colorcode_puls_acceptance.get(), self._new_colorcode_spacing.get()].count(True) > 1:
            messagebox.showinfo(title='Information', message='Can only select one color code at a time.')
self._new_colorcode_beams.set(False)
self._new_colorcode_plates.set(False)
self._new_colorcode_pressure.set(False)
self._new_colorcode_utilization.set(False)
self._new_colorcode_sigmax.set(False)
self._new_colorcode_sigmay1.set(False)
self._new_colorcode_sigmay2.set(False)
self._new_colorcode_tauxy.set(False)
self._new_colorcode_structure_type.set(False)
self._new_colorcode_section_modulus.set(False)
self._new_colorcode_fatigue.set(False)
self._new_colorcode_total.set(False)
self._new_colorcode_puls_acceptance.set(False)
            self._new_colorcode_puls_sp_or_up.set(False)
            self._new_colorcode_spacing.set(False)
self.update_frame()
def logger(self, line = None, point = None, move_coords = None):
''' Log to be used for undo and redo. '''
if line is not None:
self._logger['added'].append([line[0], self._line_dict[line[0]]])
elif point is not None and move_coords is None:
self._logger['added'].append([point, None])
elif point is not None and move_coords is not None:
self._logger['added'].append([point, move_coords])
else:
pass
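    # Log entries are [name, payload] pairs: new lines store [line_name, (point numbers)], new points
    # store [point_name, None] and moved points store [point_name, move_coords], where move_coords
    # appears to hold the coordinate pair used by move_point(). undo() pops from self._logger['added']
    # and pushes onto ['deleted']; redo() does the reverse.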
def undo(self, event = None):
''' Method to undo and redo. '''
if len(self._logger['added']) > 0:
current = self._logger['added'].pop(-1)
if 'point' in current[0] and current[1] is None:
if current[0] not in self._logger['deleted']:
self._logger['deleted'].append(current)
self.delete_point(undo=current[0])
elif 'point' in current[0] and current[1] is not None:
self.move_point(redo=current[1][0])
elif 'line' in current[0]:
if current[0] not in [line[0] for line in self._logger['deleted']]:
self._logger['deleted'].append(current)
self.delete_line(undo=current[0])
def redo(self, event = None):
''' Method to undo and redo. '''
if len(self._logger['deleted']) > 0:
current = self._logger['deleted'].pop(-1)
if 'point' in current[0] and current[1] is None:
self.new_point(redo=current[0])
elif 'point' in current[0] and current[1] is not None:
self.move_point(redo=current[1][1])
elif 'line' in current[0]:
self.new_line(redo=['point'+str(num) for num in current[1]])
def open_documentation_pdf(self):
''' Open the documentation pdf. '''
if os.path.isfile('ANYstructure_documentation.pdf'):
os.startfile('ANYstructure_documentation.pdf')
else:
os.startfile(self._root_dir + '/' + 'ANYstructure_documentation.pdf')
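        # Note: os.startfile is only available on Windows; on other platforms this call raises AttributeError.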
def open_documentation(self):
''' Open the documentation webpage. '''
import webbrowser
webbrowser.open('https://sites.google.com/view/anystructure/start', new=0, autoraise=True)
def open_donate(self):
        ''' Open the donation webpage. '''
import webbrowser
webbrowser.open('https://sites.google.com/view/anystructure/donate', new=0, autoraise=True)
def open_about(self):
'''
Open a about messagebox.
:return:
'''
messagebox.showinfo(title='Information', message='ANYstructure 4.x (Stable/Production)'
'\n'
'\n'
'By Audun Arnesen Nyhus \n'
'2022\n\n'
'All technical calculation based on \n'
'DNV RPs and standards')
def export_to_js(self):
'''
        Export the model (points, lines, sections, beams) to a JavaScript file.
:return:
'''
save_file = filedialog.asksaveasfile(mode="w", defaultextension=".js")
        if save_file is None:  # asksaveasfile returns `None` if the dialog is closed with "cancel".
return
# Setting up interface class.
JS = sesam.JSfile(self._point_dict, self._line_dict, self._sections, self._line_to_struc)
JS.write_points()
JS.write_lines()
JS.write_sections()
JS.write_beams()
save_file.writelines(JS.output_lines)
save_file.close()
if __name__ == '__main__':
# multiprocessing.freeze_support()
# errorCode = ctypes.windll.shcore.SetProcessDpiAwareness(2)
# root = tk.Tk()
# root.tk.call("source", "sun-valley.tcl")
# root.tk.call("set_theme", "dark")
# style = ttk.Style(root)
# root.tk.eval("""
# set dir C:/Users/cefany/Downloads/awthemes-10.4.0
#
# package ifneeded awthemes 10.4.0 \
# [list source [file join $dir awthemes.tcl]]
# package ifneeded colorutils 4.8 \
# [list source [file join $dir colorutils.tcl]]
# package ifneeded awarc 1.6.1 \
# [list source [file join $dir awarc.tcl]]
# package ifneeded ttk::theme::awarc 1.6.1 \
# [list source [file join $dir awarc.tcl]]
# package ifneeded awblack 7.8.1 \
# [list source [file join $dir awblack.tcl]]
# package ifneeded ttk::theme::awblack 7.8.1 \
# [list source [file join $dir awblack.tcl]]
# package ifneeded awbreeze 1.9.1 \
# [list source [file join $dir awbreeze.tcl]]
# package ifneeded ttk::theme::awbreeze 1.9.1 \
# [list source [file join $dir awbreeze.tcl]]
# package ifneeded awbreezedark 1.0.1 \
# [list source [file join $dir awbreezedark.tcl]]
# package ifneeded ttk::theme::awbreezedark 1.0.1 \
# [list source [file join $dir awbreezedark.tcl]]
# package ifneeded awclearlooks 1.3.1 \
# [list source [file join $dir awclearlooks.tcl]]
# package ifneeded ttk::theme::awclearlooks 1.3.1 \
# [list source [file join $dir awclearlooks.tcl]]
# package ifneeded awdark 7.12 \
# [list source [file join $dir awdark.tcl]]
# package ifneeded ttk::theme::awdark 7.12 \
# [list source [file join $dir awdark.tcl]]
# package ifneeded awlight 7.10 \
# [list source [file join $dir awlight.tcl]]
# package ifneeded ttk::theme::awlight 7.10 \
# [list source [file join $dir awlight.tcl]]
# package ifneeded awtemplate 1.5.1 \
# [list source [file join $dir awtemplate.tcl]]
# package ifneeded ttk::theme::awtemplate 1.5.1 \
# [list source [file join $dir awtemplate.tcl]]
# package ifneeded awwinxpblue 7.9.1 \
# [list source [file join $dir awwinxpblue.tcl]]
# package ifneeded ttk::theme::awwinxpblue 7.9.1 \
# [list source [file join $dir awwinxpblue.tcl]]
#
# package require tksvg
#
# """)
# root.tk.call("package", "require", 'awwinxpblue')
# style.theme_use('awwinxpblue')
# width = int(root.winfo_screenwidth()*1)
# height = int(root.winfo_screenheight()*0.95)
# root.geometry(f'{width}x{height}')
# my_app = Application(root)
# root.mainloop()
multiprocessing.freeze_support()
errorCode = ctypes.windll.shcore.SetProcessDpiAwareness(2)
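    # Windows-only: SetProcessDpiAwareness(2) requests per-monitor DPI awareness so the GUI
    # is rendered sharply on high-DPI displays.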
root = tk.Tk()
width = root.winfo_screenwidth()
height = root.winfo_screenheight()
root.geometry(f'{width}x{height}')
my_app = Application(root)
root.mainloop()
    #Application(None).openfile(r'C:\Github\ANYstructure\ANYstructure\ship_section_example.txt', alone=True)

# ANYstructure-4.10/any_files/main_application.py ends here.
import math
from matplotlib.backends import backend_tkagg
from matplotlib import pyplot as plt
import numpy as np
from collections import deque
import copy
import matplotlib.animation as animation
try:
    import any_files.example_data as test
    import any_files.helper as hlp  # hlp.get_num() is used when find_lines is enabled
except ModuleNotFoundError:
    import ANYstructure.any_files.example_data as test
    import ANYstructure.any_files.helper as hlp
def dist(p, q):
return math.sqrt((p[0] - q[0]) ** 2 + (p[1] - q[1]) ** 2)
class CreateGridWindow():
def __init__(self, grid, canvas_dim, to_draw, canvas_origo, find_lines: bool = False):
self._grid = grid
self._parent_dimensions = canvas_dim
self._to_draw = to_draw
self._parent_origo = canvas_origo
self._points_child = {}
self._child_dimensions = (canvas_dim[0]-canvas_origo[0]+1, canvas_origo[1]+1)
self._bfs_search_data = None
for line,point in to_draw.items():
point1 = (int(point[0][0]),int(point[0][1]))
point2 = (int(point[1][0]),int(point[1][1]))
self._points_child[line] = [point1,point2]
for line, points in self._points_child.items():
for point in self._grid.get_points_along_line(points[0],points[1]):
if not find_lines:
self._grid.set_barrier(point[0],point[1])
else:
self._grid.set_barrier(point[0], point[1], line_number = hlp.get_num(line))
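        # Every structure line is rasterized onto the grid and its cells are marked as barriers;
        # when find_lines is True the barrier cells also remember which line number they belong to,
        # so find_lines_inside_area() can report the lines within a rectangular search area.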
def __str__(self):
return 'class CreateGridWindow(): __str__ - Not implemented'
@property
def grid(self):
return self._grid
@grid.setter
def grid(self, val):
self._grid = val
@property
def bfs_search_data(self):
return self._bfs_search_data
@bfs_search_data.setter
def bfs_search_data(self, val):
self._bfs_search_data = val
def draw_grid(self, save = False, tank_count = None):
'''
        Draw the grid using matplotlib.
        Cell values: -1 = barrier (BHD/Deck), 0 = not searched, 1 = external,
        2 and up = compartment numbers (shown with a discrete colormap).
:return:
'''
def discrete_matshow(data):
if self._bfs_search_data is not None:
comp_cell_count = self._bfs_search_data
area_mult = self._parent_dimensions[0]/(10*self._parent_dimensions[0]) * \
self._parent_dimensions[1]/(10*self._parent_dimensions[1])
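                # This expression reduces to 1/100, i.e. each grid cell contributes 0.01 to the
                # approximate compartment area shown in the legend.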
plt_txt = list()
            tank_iter = [] if tank_count is None else range(tank_count)
for num in tank_iter:
if self._bfs_search_data is not None:
tank_area = comp_cell_count[num + 2] * area_mult
plt_txt.append('Comp' + str(num + 2) + ' Approx. area: ' + str(round(tank_area,1)))
else:
plt_txt.append('Comp' + str(num + 2))
fig = plt.figure(figsize=[12, 8])
ax = fig.add_subplot(111)
ax.tick_params(labelsize=8)
fig.subplots_adjust(left=0.05, right=0.90, top=0.95, bottom=0.05)
# get discrete colormap
#cmap = plt.get_cmap('Accent_r', np.int32(np.max(data)) - np.int32(np.min(data)) + 1)
cmap = plt.get_cmap('jet', np.int32(np.max(data)) - np.int32(np.min(data)) + 1)
# set limits .5 outside true range
cax = ax.matshow(data, cmap=cmap, vmin=np.min(data) - .5, vmax=np.max(data) + .5)
# tell the colorbar to tick at integers
colb = fig.colorbar(cax, ticks=np.arange(np.min(data), np.max(data) + 1), shrink=0.8)
if tank_count is not None:
colb.set_ticks([-1, 0, 1] + [num + 2 for num in range(tank_count)])
colb.set_ticklabels(['BHD/Deck', 'Not searched', 'External'] + plt_txt)
# generate data
discrete_matshow(self._grid.get_matrix())
plt.suptitle('Compartments returned from search operation displayed below', fontsize=20, color='red')
plt.xscale('linear')
plt.axis('off')
plt.annotate('*area calculation inaccuracies due to thickness of barriers (BHD/Deck)', (0, 0), (0, -20),
xycoords='axes fraction', textcoords='offset points', va='top', fontsize = 10)
if save:
plt.savefig('current_comps.png')
else:
plt.show()
def animate_grid(self, grids_to_animate: list = None, tank_count = None):
''' If animation is selected, the grid is shown here. '''
all_grids = grids_to_animate
def generate_data():
if len(all_grids) == 0:
ani.event_source.stop()
current_grid = all_grids.pop(0)
return current_grid
def update(data):
if len(all_grids) == 0:
ani.event_source.stop()
cax.set_data(data)
return cax
def data_gen():
if len(all_grids) == 0:
ani.event_source.stop()
while True:
yield generate_data()
plt.ion()
#tank_count = np.max(all_grids[-1])
fig = plt.figure(figsize=[12, 8])
ax = fig.add_subplot(111)
ax.tick_params(labelsize=8)
fig.subplots_adjust(left=0.05, right=0.90, top=0.95, bottom=0.05)
# get discrete colormap
cmap = plt.get_cmap('Accent_r', np.int32(np.max(all_grids[-1])) - np.int32(np.min(all_grids[-1])) + 1)
# set limits .5 outside true range
cax = ax.matshow(all_grids[-1], cmap=cmap, vmin=np.min(all_grids[-1]) - .5, vmax=np.max(all_grids[-1]) + .5)
# tell the colorbar to tick at integers
colb = fig.colorbar(cax, ticks=np.arange(np.min(all_grids[-1]), np.max(all_grids[-1]) + 1), shrink=0.8)
if self._bfs_search_data is not None:
comp_cell_count = self._bfs_search_data
area_mult = self._parent_dimensions[0] / (10 * self._parent_dimensions[0]) * \
self._parent_dimensions[1] / (10 * self._parent_dimensions[1])
plt_txt = list()
for num in range(tank_count):
if self._bfs_search_data is not None:
tank_area = comp_cell_count[num + 2] * area_mult
plt_txt.append('Comp' + str(num + 2) + ' Approx. area: ' + str(round(tank_area, 1)))
else:
plt_txt.append('Comp' + str(num + 2))
if tank_count is not None:
colb.set_ticks([-1, 0, 1] + [num + 2 for num in range(int(tank_count))])
colb.set_ticklabels(['BHD/Deck', 'Not searched', 'External'] + plt_txt)
ani = animation.FuncAnimation(fig, update, data_gen, interval=50)
fm = plt.get_current_fig_manager()
#fm.window.activateWindow()
#fm.window.raise_()
plt.suptitle('Compartments returned from search operation displayed below', fontsize=20, color='red')
plt.xscale('linear')
plt.axis('off')
plt.annotate('*area calculation inaccuracies due to thickness of barriers (BHD/Deck)', (0, 0), (0, -20),
xycoords='axes fraction', textcoords='offset points', va='top', fontsize = 10)
plt.show()
def search_bfs(self, animate = False):
'''
        Breadth-first style search method.
        Every 20th row/column is checked for empty cells in the grid. When an empty cell is found, a
        flood-fill search starts from it. The search ends when no more empty cells are found in the
        boundary regions (expanding search).
        USE GRID CONVENTION HERE, NOT POINTS:
        grid(row, col) is the same as grid(y, x)
        points use point(x, y), which is the same as grid(col, row)
:return:
'''
compartment_count = 1
compartments = {}
all_grids = []
anim_count = 0
if animate:
all_grids.append(self._grid.get_matrix())
        barriers_where = np.where(self._grid.cells.reshape((1, np.prod(self._grid.cells.shape))) == -1)
barrier_comp_count = dict()
for startrow in range(0, self._child_dimensions[1], 20):
for startcol in range(0, self._child_dimensions[0], 20):
if self._grid.is_empty(startrow,startcol):
el_max = ''
el_min = ''
cells = 0
boundary = deque()
boundary.append((startrow,startcol))
corners = []
barrier_comp_count[compartment_count] = 0
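                    # Flood fill from the seed cell. Note that boundary.pop() takes from the right
                    # end of the deque, so the traversal is effectively depth-first; for the purpose
                    # of identifying connected compartments the result is the same as breadth-first.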
while len(boundary) != 0:
current_cell = boundary.pop()
#find the min/max elevation, counting cells in tank
if el_max == '':
el_max = current_cell[0]
el_min = current_cell[0]
else:
if current_cell[0] < el_max:
el_max = current_cell[0]
if current_cell[0] > el_min:
el_min = current_cell[0]
cells += 1
anim_count += 1
four_neighbors = self._grid.four_neighbors(current_cell[0], current_cell[1])
neighbors = self._grid.eight_neighbors(current_cell[0], current_cell[1])
                        # doing search operations and looking for corners
no_of_barriers = 0
for neighbor in four_neighbors:
if self._grid.get_value(neighbor[0], neighbor[1]) == -1:
no_of_barriers += 1
barrier_comp_count[compartment_count] += 1
else:
pass
if self._grid.is_empty(neighbor[0], neighbor[1]):
self._grid.set_value(neighbor[0], neighbor[1],compartment_count)
boundary.append(neighbor)
if animate:
if compartment_count > 1:
anim_interval = 2000
else:
anim_interval = 20000
if anim_count/anim_interval - anim_count//anim_interval == 0.0:
all_grids.append(copy.deepcopy(self._grid.get_matrix()))
#finding corners on diagonal cells
for neighbor in [item for item in neighbors if item not in four_neighbors]:
if self._grid.get_value(neighbor[0], neighbor[1]) == -1:
no_of_barriers += 1
else:
pass
if no_of_barriers > 4:
corners.append((neighbor[0], neighbor[1]))
# returning values to the program
compartments[compartment_count] = cells, corners
compartment_count += 1
if animate:
all_grids.append(self._grid.get_matrix())
cells_modified, area_modified = dict(), dict()
comp_sum = np.sum([data for data in barrier_comp_count.values()])
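        # Each compartment's raw cell count is increased by a share of the total barrier cells,
        # proportional to how many barrier cells border that compartment. This compensates for the
        # thickness of the barrier lines (BHD/Deck) when the cell counts are later used as areas.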
for comp_no, data in compartments.items():
barrier_ratio_of_total = barrier_comp_count[comp_no] / comp_sum
if np.isnan(barriers_where[0].shape[0] * barrier_ratio_of_total):
continue
cells_modified[comp_no] = data[0] + int(barriers_where[0].shape[0] * barrier_ratio_of_total)
to_return = {'compartments': compartments, 'grids':all_grids, 'modified_cell_count': cells_modified}
self.bfs_search_data = to_return['modified_cell_count']
self._grid.bfs_search_data = to_return['modified_cell_count']
return to_return
def find_lines_inside_area(self, row1, col1, row2, col2):
'''
Define a search area.
Return the lines in this area.
This method makes sense if "find_lines" is set to True.
'''
return np.unique(self._grid.get_array()[row1:row2, col1:col2])
if __name__ == '__main__':
import time
t1 = time.time()
canvas_dim = [1000,720]
canvas_origo = (50,670)
my_grid = CreateGridWindow(test.get_grid_no_inp(), canvas_dim, test.get_to_draw(), canvas_origo)
search_return = my_grid.search_bfs(animate = True)
my_grid.draw_grid(tank_count=4)
    print(np.unique(my_grid.grid))

# ANYstructure-4.10/any_files/grid_window.py ends here.
import numpy as np
class Loads():
'''
This Class calculates the load to be applied on the structure
'''
def __init__(self, main_load_dict):
self.main_load_dict = main_load_dict
self.static_draft = main_load_dict['static_draft']
self.poly_third = main_load_dict['poly_third']
self.poly_second = main_load_dict['poly_second']
self.poly_first = main_load_dict['poly_first']
self.poly_const = main_load_dict['poly_const']
self.manual_press = main_load_dict['man_press']
self.load_condition = main_load_dict['load_condition']
if main_load_dict['load_condition'] == 'slamming':
self.slamming_pl_reduction_factor = main_load_dict['slamming mult pl']
self.slamming_stf_reduction_factor = main_load_dict['slamming mult stf']
else:
self.slamming_pl_reduction_factor = 1
self.slamming_stf_reduction_factor = 1
self.name_of_load = main_load_dict['name_of_load']
try:
self.limit_state = main_load_dict['limit_state']
except KeyError:
self.limit_state = 'ULS'
try:
self.horizontal_types = main_load_dict['structure_types']['horizontal']
self.vertical_types = main_load_dict['structure_types']['vertical']
except KeyError:
self.horizontal_types = ['BOTTOM', 'BBT', 'HOPPER', 'MD']
self.vertical_types = ['BBS', 'SIDE_SHELL', 'SSS']
self.dynamic_pressure = 0
self.static_pressure = 0
self.is_external = True
def __str__(self):
string = str('Properties selected load is:'+
'\n----------------------------'+
'\n Name of load: ' + str(self.name_of_load) +
                     '\n Polynomial (x^3): '+ str(self.poly_third) +
                     '\n Polynomial (x^2): '+ str(self.poly_second) +
                     '\n Polynomial (x): '+ str(self.poly_first) +
'\n Constant (C): '+ str(self.poly_const) +
'\n Load condition: '+ str(self.load_condition) +
'\n Limit state ' + str(self.limit_state) +
'\n Is external? '+ str(self.is_external) +
'\n Static draft: '+ str(self.static_draft))
return string
def get_calculated_pressure(self,varibale_value, acceleration, structure_type):
'''
        Input variable is a tuple of (x, y). This method needs only one of the coordinates; the right
        one is chosen based on the structure type (horizontal types use x, vertical types use y).
:param varibale_value:
:return:
'''
#print(' pressure requested for var/acc: ', varibale_value,'/',acceleration, 'type is: ', structure_type)
input_var = varibale_value
if self.is_static():
press = 1025 * acceleration * (self.static_draft - input_var[1])
elif structure_type in self.horizontal_types:
press = self.__calculate_poly_value(input_var[0])
elif structure_type in self.vertical_types:
press = self.__calculate_poly_value(input_var[1])
else:
press = 0
if self.load_condition == 'slamming':
psl = self.__calculate_poly_value(0)
return max(press, psl)
else:
return press
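    # In symbols: for a dynamic load the pressure is the polynomial
    # p(v) = poly_third*v**3 + poly_second*v**2 + poly_first*v + poly_const, evaluated at the
    # x-coordinate for horizontal structure types and at the y-coordinate (elevation) for vertical
    # types. A static load instead returns 1025 * acceleration * (static_draft - y). For slamming
    # loads the polynomial evaluated at 0 is used as a lower bound on the returned pressure.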
def get_report_string(self):
return [ 'Name of load: ' + self.name_of_load,
                 'Polynomial (x^3): '+ str(self.poly_third) ,
                 'Polynomial (x^2): '+ str(self.poly_second) ,
                 'Polynomial (x): '+ str(self.poly_first) ,
'Constant (C): '+ str(self.poly_const) ,
'Load condition: '+ str(self.load_condition) ,
'Limit state ' + str(self.limit_state) ,
'Is external? '+ str(self.is_external) ,
'Static draft: '+ str(self.static_draft)]
def __calculate_poly_value(self, variable):
'''
        Returning the magnitude of the load from the polynomial equation.
:param variable:
:return:
'''
return np.polyval( [self.poly_third, self.poly_second, self.poly_first, self.poly_const], variable)
def is_tank_test(self):
return self.load_condition == 'tanktest'
def get_load_parmeters(self):
return self.poly_third, self.poly_second, self.poly_first, self.poly_const, self.load_condition, \
None, self.manual_press, self.static_draft, self.name_of_load, self.limit_state, \
self.slamming_pl_reduction_factor, self.slamming_stf_reduction_factor
def get_name(self):
return self.name_of_load
def is_static(self):
'''
Checking if the load is static type.
:return:
'''
        return self.static_draft is not None
def get_static_draft(self):
'''
Return static draft if is_static
:return:
'''
if self.is_static():
return self.static_draft
else:
pass
def get_limit_state(self):
''' Return ULS, FLS.... '''
return self.limit_state
def get_load_condition(self):
''' Getting loaded, ballast or part '''
return self.load_condition
def get_slamming_reduction_plate(self):
return self.slamming_pl_reduction_factor
def get_slamming_reduction_stf(self):
return self.slamming_stf_reduction_factor
class Tanks():
'''
This class incorporates all tank definitions
temp_tank_dict = {0 'comp_no' : comp_no,
1 'cells' : properties[0],
2 'min_el' : properties[1],
3 'max_el' : properties[2],
4 'content' : '',
5 'added_press' : 0,
6 'acc' : {static:g,dyn_loaded:az,dyn_ballast:az}
7 'density' : 1025}
'''
def __init__(self, tank_dict):
self.properties = tank_dict
self.compartment_number = tank_dict['comp_no']
self.cells = tank_dict['cells']
self.min_elevation = tank_dict['min_el']
self.max_elevation = tank_dict['max_el']
self.content = tank_dict['content']
self.added_pressure = tank_dict['added_press']
self.density = tank_dict['density']
self.acc_static = tank_dict['acc']['static']
self.acc_dyn_loaded = tank_dict['acc']['dyn_loaded']
self.acc_dyn_ballast = tank_dict['acc']['dyn_ballast']
self.all_types = ['crude_oil', 'diesel', 'slop', 'fresh water', 'ballast']
def __str__(self):
'''
Prints a string for the tank.
:return:
'''
tank_string = str('--- Tank properties (selected tank) ---'+
                          '\n Minimum elevation: ' + str(self.min_elevation) +
'\n Maximum elevation: ' + str(self.max_elevation) +
'\n Content of tank: ' + self.content +
'\n Defined density: ' + str(self.density) +
'\n Defined acceleration: ' + 'st = ' + str(self.acc_static) + ' , azl = ' +
str(self.acc_dyn_loaded) + ' , azb = ' +
str(self.acc_dyn_ballast) +
'\n Added pressure at tank top: ' + str(self.added_pressure) )
return tank_string
def set_overpressure(self, overpressure):
'''
Setter
:param overpressure:
:return:
'''
self.added_pressure = overpressure
self.properties['added_press'] = overpressure
def set_content(self, content):
'''
Setter
        :param content:
:return:
'''
self.properties['content'] = content
self.content = content
def set_acceleration(self, acc):
'''
Setter
        :param acc:
:return:
'''
        self.acc_static = acc['static']
        self.properties['acc']['static'] = acc['static']
        self.acc_dyn_loaded = acc['dyn_loaded']
        self.properties['acc']['dyn_loaded'] = acc['dyn_loaded']
        self.acc_dyn_ballast = acc['dyn_ballast']
        self.properties['acc']['dyn_ballast'] = acc['dyn_ballast']
def set_density(self,density):
'''
Setter
        :param density:
:return:
'''
self.properties['density'] = density
self.density = density
def get_name(self):
'''
        Returns the name of the compartment
:return:
'''
return 'comp'+str(self.compartment_number)
def get_highest_elevation(self):
'''
Find the top of the tank.
:return:
'''
return self.max_elevation
def get_lowest_elevation(self):
'''
Find the bottom of the tank.
:return:
'''
return self.min_elevation
def get_line_pressure_from_max_pressure(self, pressure, coordinates):
'''
Used when you have a maximum pressure and request the pressure at a specific coordinate.
:param coordinates:
:return:
'''
elevation = coordinates[1]
return pressure *((self.get_highest_elevation()-elevation)/
(self.get_highest_elevation()-self.get_lowest_elevation()))
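    # Sketch of the linear scaling above (assumed numbers): for a tank spanning 0 m to 10 m with a
    # maximum pressure of 100 kPa at the bottom, a point at elevation 4 m gets
    # 100 * (10 - 4) / (10 - 0) = 60 kPa.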
def get_calculated_pressure(self, coordinates, acceleration):
'''
        Get the pressure at the given coordinate for the specified acceleration.
        :param coordinates:
        :param acceleration:
:return:
'''
elevation = coordinates[1]
press = (self.get_highest_elevation()-elevation)*self.density*acceleration
#print(' tank calculated pressure: ',str(self.get_highest_elevation()),'-', str(elevation),'*', str(self.density),'*',str(acceleration), ' = ', press)
return press
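    # Hydrostatic sketch (assumed numbers): tank top at 10 m, point at 4 m, density 1025 kg/m3 and
    # acceleration 9.81 m/s2 gives (10 - 4) * 1025 * 9.81 = 60 331.5 Pa. Note that the tank top
    # overpressure is not included here.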
def get_bottom_pressure(self):
'''
Get pressure at bottom of tank.
:return:
'''
        # The static acceleration is used for the pressure at the bottom of the tank.
        return (self.get_highest_elevation() - self.get_lowest_elevation()) * self.density * self.acc_static + self.added_pressure
def get_top_pressure(self):
'''
Get the pressure at the top of the tank.
:return:
'''
return self.added_pressure
def get_density(self):
'''
Get the tank density.
:return:
'''
return self.density
def get_content(self):
'''
        Return the tank content type.
:return:
'''
return self.content
def get_accelerations(self):
'''
Returns the defined accelerations
:return:
'''
return (self.acc_static, self.acc_dyn_loaded,self.acc_dyn_ballast)
def get_overpressure(self):
'''
Get the overpressure at tank top.
:return:
'''
return self.added_pressure
def get_parameters(self):
'''
Returns properties
:return:
'''
# return_dict = {'comp_no':self.compartment_number, 'cells':self.cells, 'min_el':self.min_elevation,
# 'max_el':self.max_elevation, 'content':self.content, 'added_press':self.added_pressure,
# 'density':self.density, 'acc':{'static':self.acc_static, 'dyn_loaded':self.acc_dyn_loaded,
# 'dyn_ballast':self.acc_dyn_ballast}}
# return return_dict
return self.properties
def is_loaded_condition(self):
'''
        Check whether the tank shall be included in the loaded condition.
        Loaded contents: self.all_types[0:4] = ['crude_oil', 'diesel', 'slop', 'fresh water']
:return:
'''
try: return self.content in self.all_types[0:4]
except AttributeError: return False
def is_ballast_condition(self):
'''
        Check whether the tank shall be included in the ballast condition.
:return:
'''
try: return self.content == self.all_types[4]
except AttributeError: return False
def is_tank_test_condition(self):
'''
        Check whether the tank shall be included in the tank test condition.
:return:
'''
        # All defined content types are assumed to be relevant for the tank test condition.
        try: return self.content in self.all_types
except AttributeError: return False
def get_condition(self):
'''
Returning the condition.
self.load_conditions = ['loaded', 'ballast','tanktest']
:return:
'''
try:
if self.is_ballast_condition():
return 'ballast'
elif self.is_loaded_condition():
return 'loaded'
elif self.is_tank_test_condition():
return 'tanktest'
except AttributeError: return False
def get_tank_dnv_minimum_pressure(self, lf_static, lf_enviromental):
'''
Calculating 4.3.7 and 4.3.8 and returning the highest of these pressures.
:return:
'''
if self.is_loaded_condition():
dyn_acc = self.acc_dyn_loaded
elif self.is_ballast_condition():
dyn_acc = self.acc_dyn_ballast
else:
dyn_acc = 0
hop = self.get_highest_elevation()-self.get_lowest_elevation()
#All tanks shall be designed for the following internal design pressure:
p_4_3_7 = self.density * self.acc_static * hop *(lf_static+(dyn_acc/self.acc_static)*lf_enviromental)
#For tanks where the air pipe may be filled during filling operations, the following additional internal
#design pressure conditions shall be considered:
p_4_3_8 = (self.density*self.acc_static*hop + self.get_overpressure())*lf_static
return max(p_4_3_7, p_4_3_8)
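    # Worked sketch of 4.3.7 / 4.3.8 (all numbers are assumptions): density 1025 kg/m3,
    # acc_static 9.81 m/s2, hop 10 m, dyn_acc 3.0 m/s2, overpressure 25 kPa, lf_static 1.2,
    # lf_enviromental 0.7:
    #   p_4_3_7 = 1025*9.81*10*(1.2 + (3.0/9.81)*0.7) ~ 142 kPa
    #   p_4_3_8 = (1025*9.81*10 + 25000)*1.2          ~ 151 kPa
    # so the method would return p_4_3_8 in this case.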
class Combination():
'''
THIS CLASS IS CURRENTLY NOT USED. MAY NOT BE USED AT ALL. IT IS STUPID.
    This class contains the load combinations.
combination,self.active_line,compartment
'''
def __init__(self, object_line, comb_dict = None, tank_dict = None, load_dict = None):
'''
Input from main application is:
line for this object
tank_dict = {} #main tank dictionary (created when BFS search is executed for the grid) (comp# : TankObj)
load_dict = {} #main load dictionary (created in separate load window (load# : [LoadObj, lines])
comb_dict = {} #load combination dictionary (comb,line,load) : [DoubleVar(), DoubleVar], IntVar()]
'''
self.object_line = object_line
self.comb_dict = comb_dict
self.tank_dict = tank_dict
self.load_dict = load_dict
        try: self.combination = list(comb_dict.keys())[0]
        except AttributeError : self.combination = None
        try: self.load_case = list(comb_dict.keys())[2]
        except AttributeError: self.load_case = None
        try: self.load_factor_static = list(comb_dict.values())[0]
        except AttributeError: self.load_factor_static = None
        try: self.load_factor_dynamic = list(comb_dict.values())[1]
        except AttributeError: self.load_factor_dynamic = None
        try: self.on_off = list(comb_dict.values())[2]
        except AttributeError: self.on_off = None
def __str__(self):
return 'NOT IMPLEMENTED'
def get_load_factors(self):
'''
Get the tk.DoubleVar, tk.DoubleVar, tk.IntVar that is used in the load factor input and on/off.
:return:
'''
return self.load_factor_static.get(), self.load_factor_dynamic.get(), self.on_off.get()
def get_load_factor_static(self):
'''
        Getting the static load factor.
:return:
'''
return self.load_factor_static.get()
def get_load_factor_dynamic(self):
'''
        Getting the dynamic load factor.
:return:
'''
return self.load_factor_dynamic.get()
def get_on_off(self, value):
'''
        Getting the on/off switch value.
:return:
'''
return self.on_off.get()
def set_load_factor_static(self, value):
'''
        Setting the static load factor.
:return:
'''
self.load_factor_static = value
def set_load_factor_dynamic(self, value):
'''
        Setting the dynamic load factor.
:return:
'''
self.load_factor_dynamic = value
def set_on_off(self, value):
'''
        Setting the on/off switch value.
:return:
'''
self.on_off= value
def set_combination_dictionary(self, value):
'''
Setting the combination dictionary.
:return:
'''
self.comb_dict = value
assert tuple(value.keys())[0][1] == self.object_line, 'line is not correct!'
assert len(tuple(value.keys())[0]) == 3 , 'length of key must be 3'
assert len(tuple(value.values())[0]) == 3, 'length of values must be 3'
try: self.set_load_factor_static(list(value.values())[0][0])
except AttributeError: pass
try: self.set_load_factor_dynamic(list(value.values())[0][1])
except AttributeError: pass
try: self.set_on_off(list(value.values())[0][2])
except AttributeError: pass
def set_load_dictionary(self, value):
'''
Setting the load dictionary.
:return:
'''
self.load_dict = value
def set_tank_dictionary(self, value):
'''
Setting the tank dictionary.
:return:
'''
self.tank_dict = value
if __name__ == '__main__':
import example_data as ex
for load, type in zip([Loads(ex.load_bottom), Loads(ex.load_side), Loads(ex.load_static), Loads(ex.load_slamming)],
['BOTTOM', 'SIDE_SHELL', '', '']):
print(load.get_calculated_pressure((10,10), 3, type)) | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/calc_loads.py | calc_loads.py |
import math, copy, csv, os
import numpy as np
print_it = True
root_dir = os.path.dirname(os.path.abspath(__file__))
def print_helper(properties, prop_text, units):
'''
Used to print out the properties
'''
dummy_i = 0
print(' \n ')
for prop_i in prop_text:
print(str(prop_i) + ' ' + str(properties[dummy_i]) + ' ' + str(units[dummy_i]) )
dummy_i += 1
def dist(p, q):
return math.sqrt((p[0] - q[0]) ** 2 + (p[1] - q[1]) ** 2)
def get_num(x):
try:
return int(''.join(ele for ele in x if ele.isdigit() or ele == '.'))
except ValueError:
return x
def list_2_string(list):
new_string = ''
for item in list[0:-1]:
new_string += str(item)
new_string += ' , '
new_string += str(list[-1])
return new_string
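# Quick illustration of the three small helpers above (assumed inputs):
#   dist((0, 0), (3, 4))      -> 5.0
#   get_num('HP200x10')       -> 20010   (digits and '.' are joined before the int() cast)
#   list_2_string([1, 2, 3])  -> '1 , 2 , 3'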
def one_load_combination(line_name_obj, coord, defined_loads, load_condition,
defined_tanks, comb_name, acc, load_factors_all):
'''
Creating load combination.
Inserted into self.line_to_struc index = 4
"dnva", "line12", "static_ballast_10m"
#load combination dictionary (comb,line,load) : [stat - DoubleVar(), dyn - DoubleVar], on/off - IntVar()]
:return:
'''
if load_condition not in ['tanktest','manual', 'slamming']:
return helper_dnva_dnvb(line_name_obj, coord, defined_loads, load_condition,
defined_tanks, comb_name, acc, load_factors_all)
elif load_condition == 'tanktest' and comb_name == 'tanktest':
return helper_tank_test(line_name_obj, coord, defined_loads, load_condition,
defined_tanks, comb_name, acc, load_factors_all)
elif load_condition == 'manual':
return helper_manual(line_name_obj, comb_name,load_factors_all)
elif load_condition == 'slamming':
return helper_slamming(defined_loads)
else:
return [None, ' ']
def helper_dnva_dnvb(line_name_obj, coord, defined_loads, load_condition,
defined_tanks, comb_name, acc, load_factors_all):
# calculate the defined loads
calc_load, load_print, prt_conditions = [], ['',], []
line_name = line_name_obj[0]
structure_type = line_name_obj[1].get_structure_type()
# if line_name_obj[0] == 'line12':
# print('Load calculation for '+line_name_obj[0] + ' ' + comb_name+ ' ' + load_condition)
# if print_line != None and line_name == print_line:
# print('Load calculation for '+line_name_obj[0] + ' ' + comb_name+ ' ' + load_condition)
if len(defined_loads) != 0:
for load in defined_loads :
if comb_name+load_condition not in prt_conditions:
load_print.append("Loads for condition: " + load_condition + ' - ' + comb_name +' ' + '\n')
prt_conditions.append(comb_name+load_condition)
if load != None:
load_factors = load_factors_all[(comb_name, line_name, load.get_name())]
# if print_it:
# if load_factors[0].get() != 0:
# load_print.append('LOAD NAME: '+' '+ comb_name+ ' '+ line_name+' '+ load.get_name()+'\n')
# if load_factors[1].get() != 0:
# if load_factors[0].get() != 0:
# load_print.append('LOAD NAME: '+' '+ comb_name+ ' '+ line_name+' '+ load.get_name()+'\n')
# USE GET() (static,dyn, on/off)
if load_condition == load.get_load_condition():
static_pressure = (load_factors[2].get())*(load_factors[0].get())\
*load.get_calculated_pressure(coord, acc[0],structure_type)
dynamic_pressure = (load_factors[2].get())*(load_factors[1].get())\
*load.get_calculated_pressure(coord, acc[1],structure_type)
if print_it:
# load_print.append('load (NON-TANK) calculation for load condition:' + load_condition + ' - Load is: '+ \
# load.get_name() + ' - Type is: \n')
if load_factors[0].get() != 0:
load_print.append(' static with acceleration: '+ str(acc[0])+ ' is: \n '+
str(load_factors[2].get())+'*'+\
str(load_factors[0].get())+'*'+\
str(round(load.get_calculated_pressure(coord, acc[0],structure_type),1))+ ' = '+ \
str(round(static_pressure,1))+'\n')
if load_factors[1].get() != 0:
load_print.append(' dynamic with acceleration: '+ str(acc[1])+' is: \n '+
str(load_factors[2].get())+'*'+\
str(load_factors[1].get())+'*'+\
str(round(load.get_calculated_pressure(coord, acc[1],structure_type),1))+ ' = '+ \
str(round(dynamic_pressure,1))+'\n')
# if line_name_obj[0] == 'line12':
# print('Pressures',static_pressure,'+',dynamic_pressure,'=',static_pressure+dynamic_pressure)
calc_load.append(static_pressure+dynamic_pressure)
# calculate the tank loads
if len(defined_tanks) != 0:
temp_tank = {}
if comb_name + load_condition not in prt_conditions:
load_print.append("Loads for condition: " + load_condition + ' - ' + comb_name + ' ' + '\n')
prt_conditions.append(comb_name + load_condition)
for tank_name_obj in defined_tanks:
temp_tank[tank_name_obj[0]] = 0
load_factors = load_factors_all[(comb_name, line_name, tank_name_obj[0])]
overpress_lf = [1.3,0]# if load_factors[0].get()==1.2 else [1,1.3]
if load_condition == tank_name_obj[1].get_condition():
# USE GET() (static,dyn, on/off)
static_pressure = load_factors[2].get()*(load_factors[0].get())\
*tank_name_obj[1].get_calculated_pressure(coord,acc[0])\
+tank_name_obj[1].get_overpressure()*overpress_lf[0]
dynamic_pressure = load_factors[2].get()*load_factors[1].get()\
*tank_name_obj[1].get_calculated_pressure(coord,acc[1])\
+tank_name_obj[1].get_overpressure()*overpress_lf[1]
temp_tank[tank_name_obj[0]] = static_pressure + dynamic_pressure# .append((static_pressure + dynamic_pressure))
if print_it and tank_name_obj[0]+load_condition not in prt_conditions:
prt_conditions.append(tank_name_obj[0]+load_condition)
#load_print.append('load (TANK) calculation for load condition:'+ load_condition+ ' - Tank is: '+ tank_name_obj[0]+'\n')
#load_print.append('load factors : '+ str(load_factors[0].get())+str(load_factors[1].get())+str(load_factors[2].get())+'\n')
load_print.append('\n' + tank_name_obj[0] + ' - static: '+ str(load_factors[2].get())+ '*'+ str(load_factors[0].get()) + '*'+\
str(tank_name_obj[1].get_calculated_pressure(coord,acc[0]))+' + '+\
str(tank_name_obj[1].get_overpressure())+ '*'+str(overpress_lf[0])+ ' = '+str(static_pressure)+'\n')
load_print.append(tank_name_obj[0] + ' - dynamic: '+str(load_factors[2].get())+ '*'+ str(load_factors[1].get())+ '*'+\
str(tank_name_obj[1].get_calculated_pressure(coord, acc[1]))+' + '+\
str(tank_name_obj[1].get_overpressure())+ '*'+str(overpress_lf[1])+' = '+ str(dynamic_pressure)+'\n')
# choosing the tank with the highest pressures
if len(defined_loads) == 0:
line_tank_pressure_calc = max([pressure for pressure in temp_tank.values()])
#print('line_tank_pressure_calc', line_tank_pressure_calc)
highest_dnv_tank_pressure = tank_name_obj[1].get_tank_dnv_minimum_pressure(load_factors[0].get(),
load_factors[1].get())
#print('highest_dnv_tank_pressure', highest_dnv_tank_pressure)
line_dnv_tank_pressure = tank_name_obj[1].get_line_pressure_from_max_pressure(highest_dnv_tank_pressure,
coord)
#print('line_dnv_tank_pressure', line_dnv_tank_pressure)
# if line_name_obj[0] == 'line29':
# print('Tank load to append is max( ',highest_tank_pressure_calc,highest_dnv_tank_pressure,')')
highest_tank_pressure = max(line_tank_pressure_calc,line_dnv_tank_pressure)
calc_load.append(-highest_tank_pressure if highest_tank_pressure else 0)
load_print.append('\nDNVGL-OS-C101 4.3.7 and 4.3.8 (Tank pressures) = '+ str(highest_tank_pressure)+'\n')
else:
pass
if print_it:
if len(calc_load) == 2:
load_print.append('\nRESULT: ' + str(round(calc_load[0], 1)) +' + '+
str(round(calc_load[1])) + ' = ' + str(round(sum(calc_load),1)) +'\n')
elif len(calc_load) == 1:
load_print.append(
'\nRESULT: ' + str(round(calc_load[0],1))+'\n')
else:
pass
load_print.append('------------------------------------------------------------------\n')
# if line_name_obj[0] == 'line12':
# print('end')
return [int(abs(sum(calc_load))), load_print]
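# In short, each defined load above contributes
#   p = on_off * (lf_static * p(acc_static) + lf_dynamic * p(acc_dynamic))
# while tank loads additionally get the tank top overpressure times its own factor, before the
# governing (highest) tank pressure is selected. Illustrative numbers (assumptions only):
#   on_off = 1, lf_static = 1.2, lf_dynamic = 0.7, p(acc_static) = 50 kPa, p(acc_dynamic) = 20 kPa
#   -> 1 * (1.2*50 + 0.7*20) = 74 kPa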
def helper_slamming(defined_loads):
# calculate the defined loads
calc_load, load_print = [], ['',]
if len(defined_loads) != 0:
for load in defined_loads:
if load != None and load.get_load_condition() == 'slamming':
load_print.append('Slamming pressure: \n'+ str(load.get_calculated_pressure(0, 0, 'slamming'))+ ' Pa \n')
return [load.get_calculated_pressure(0, 0, 'slamming'), load_print]
return [None, ' ']
def helper_tank_test(line_name_obj, coord, defined_loads, load_condition,
defined_tanks, comb_name, acc, load_factors_all):
# calculate the defined loads
calc_load, load_print = [], ['',]
static_pressure, dynamic_pressure = 0, 0
line_name = line_name_obj[0]
structure_type = line_name_obj[1].get_structure_type()
if len(defined_loads) != 0:
for load in defined_loads:
if load != None:
load_factors = load_factors_all[(comb_name, line_name, load.get_name())]
# USE GET() (static,dyn, on/off)
if load_condition == load.get_load_condition():
static_pressure = (load_factors[2].get()) * (load_factors[0].get()) \
* load.get_calculated_pressure(coord, acc[0], structure_type)
dynamic_pressure = (load_factors[2].get()) * (load_factors[1].get()) \
* load.get_calculated_pressure(coord, acc[1], structure_type)
calc_load.append(static_pressure + dynamic_pressure)
if print_it:
load_print.append(
                        'Tank test for: ' + load_condition + '\n' + str(load_factors[2].get())+' * '+
str(load_factors[0].get()) +' * '+
str(round(load.get_calculated_pressure(coord, acc[0], structure_type),1)) + ' + ' +
str(round(dynamic_pressure)) + ' = ' + str(round(dynamic_pressure + static_pressure))+'\n')
# calculate the tank loads
temp_tank={}
if len(defined_tanks) != 0:
for tank_name_obj in defined_tanks:
temp_tank[tank_name_obj[0]] = []
for tank_name_obj in defined_tanks:
load_factors = load_factors_all[(comb_name, line_name, tank_name_obj[0])]
# if print_it:
# load_print.append('Tank test LF: '+ str(load_factors[0].get())+' '+str(load_factors[1].get())+' '+
# str(load_factors[2].get())+'\n')
# USE GET() (static,dyn, on/off)
overpress_lf = [1.3, 0] if load_factors[0].get() == 1.2 else [1, 0]
static_pressure = (load_factors[2].get()) * (load_factors[0].get())\
* tank_name_obj[1].get_calculated_pressure(coord, acc[0])\
+tank_name_obj[1].get_overpressure()*overpress_lf[0]
dynamic_pressure = (load_factors[2].get()) * (load_factors[1].get())\
* tank_name_obj[1].get_calculated_pressure(coord, acc[1])\
+tank_name_obj[1].get_overpressure()*overpress_lf[1]
temp_tank[tank_name_obj[0]].append((static_pressure + dynamic_pressure))
if print_it:
load_print.append(
'Tank test for: ' + tank_name_obj[0] + '\n' + str(load_factors[2].get()) + ' * ' +
str(load_factors[0].get()) + ' * ' +
str(round(tank_name_obj[1].get_calculated_pressure(coord, acc[0]), 1)) + ' + ' +
str(tank_name_obj[1].get_overpressure()) +' * ' + str(overpress_lf[0]) +
' = ' + str(round(dynamic_pressure + static_pressure)) + '\n')
# choosing the tank with the highest pressures
if len(defined_tanks) != 0:
highest_tank_pressure = max([temp_tank[tank[0]] for tank in defined_tanks])
calc_load.append(-highest_tank_pressure[0] if len(highest_tank_pressure) > 0 else 0)
else:
pass
return [int(abs(sum(calc_load))), load_print]
def helper_manual(line_name, comb_name,load_factors_all):
calc_load, load_print = [], ['',]
if (comb_name, line_name[0], 'manual') not in load_factors_all.keys():
return [0, 'Manual pressure: 0']
load_factors = load_factors_all[(comb_name, line_name[0], 'manual')]
man_press = load_factors[0].get() * load_factors[1].get() * load_factors[2].get()
if print_it:
load_print.append('Manual pressure:\n'+ str(load_factors[0].get())+' * '+ str(load_factors[1].get())+' * '+
str(load_factors[2].get()) + ' = '+ str(man_press) +'\n')
return [man_press, load_print]
def helper_read_section_file(files, obj = None, to_json = False, to_csv = None):
    ''' Read section data from an xml, json or csv file. '''
import json
from xml.dom import minidom
to_return_final, to_return, return_csv = list(), dict(), list()
if type(files) != list:
files = [files,]
for file in files:
if file.endswith('xml'):
xmldoc = minidom.parse(file)
sectionlist = xmldoc.getElementsByTagName('section')
sec_types = ('unsymmetrical_i_section', 'l_section', 'bar_section')
for idx, sec_type in enumerate(sec_types):
sec_type_get = xmldoc.getElementsByTagName(sec_type)
                if len(sec_type_get) == 0:
                    continue
for item, itemdata in zip(sectionlist, sec_type_get):
if sec_type == sec_types[0]:
stf_web_h, stf_web_thk = 'h', 'tw'
stf_flange_width, stf_flange_thk = 'bfbot', 'tfbot'
stiffener_type = 'T'
mult = 1/1000
elif sec_type == sec_types[1]:
stf_web_h, stf_web_thk = 'h', 'tw'
stf_flange_width, stf_flange_thk = 'b', 'tf'
stiffener_type = 'L'
mult = 1/1000
elif sec_type == sec_types[2]:
stf_web_h, stf_web_thk = 'h', 'b'
stf_flange_width, stf_flange_thk = None, None
stiffener_type = 'FB'
mult = 1 / 1000
section_name = item.getAttribute('name')
to_return[section_name] = {'stf_web_height': [float(itemdata.getAttribute(stf_web_h)) *mult, 'm'],
'stf_web_thk': [float(itemdata.getAttribute(stf_web_thk)) *mult,'m'],
'stf_flange_width': [0 if stf_flange_width is None else
float(itemdata.getAttribute(stf_flange_width)) *mult,'m'],
'stf_flange_thk': [0 if stf_flange_thk is None else
float(itemdata.getAttribute(stf_flange_thk)) *mult, 'm'],
'stf_type': [stiffener_type, '']}
return_csv.append([to_return[section_name][var][0] for var in ['stf_web_height', 'stf_web_thk',
'stf_flange_width', 'stf_flange_thk',
'stf_type']])
if to_json:
with open('sections.json', 'w') as file:
json.dump(to_return, file)
if to_csv:
with open('sections.csv', 'w', newline='') as file:
section_writer = csv.writer(file)
for line in return_csv:
section_writer.writerow(line)
elif file.endswith('json'):
with open(file, 'r') as json_file:
to_return = json.load(json_file)
elif file.endswith('csv'):
with open(file, 'r') as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
for idx, section in enumerate(csv_reader):
if section[4] in ['L-bulb', 'bulb', 'hp']:
to_return[str(idx)] = {'stf_web_height': [float(section[0]) - float(section[3]), 'm'],
'stf_web_thk': [float(section[1]),'m'],
'stf_flange_width': [float(section[2]),'m'],
'stf_flange_thk': [float(section[3]), 'm'],
'stf_type': [section[4], '']}
else:
to_return[str(idx)] = {'stf_web_height': [float(section[0]), 'm'],
'stf_web_thk': [float(section[1]),'m'],
'stf_flange_width': [float(section[2]),'m'],
'stf_flange_thk': [float(section[3]), 'm'],
'stf_type': [section[4], '']}
if to_json:
with open('sections.json', 'w') as file:
json.dump(to_return, file)
if to_csv is not None:
with open(to_csv, 'w', newline = '') as file:
section_writer = csv.writer(file)
for line in return_csv:
section_writer.writerow(line)
if obj is not None: # This will return a modified object.
if type(obj) is not list:
obj = [obj, ]
append_list = [[],]
else:
append_list = [list() for dummy in obj]
else:
append_list = list()
for key, value in to_return.items():
if obj is not None: # This will return a modified object.
for idx, iter_obj in enumerate(obj):
new_obj = copy.deepcopy(iter_obj)
new_obj_prop = new_obj.get_structure_prop()
for prop_name, prop_val in value.items():
new_obj_prop[prop_name] = prop_val
new_obj.set_main_properties(new_obj_prop)
append_list[idx].append(new_obj)
else:
to_return_final.append(value)
if len(append_list) == 1:
to_return_final = append_list[0]
elif len(append_list) == 0:
pass
elif len(append_list) > 1:
to_return_final = append_list
return to_return_final
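# Minimal usage sketch for the reader (file names and the structure object are assumptions):
#   sections = helper_read_section_file('sections.json')   # -> list of section property dicts
#   new_objs = helper_read_section_file('sections.xml', obj=my_structure_obj)
# When obj is given, a deep copy of the object is returned for every section in the file, with the
# web/flange properties swapped in through get_structure_prop()/set_main_properties().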
def open_example_file(root_path = None):
import os
if os.path.isfile('sections.csv'):
os.startfile('sections.csv')
else:
os.startfile(root_path + '/' + 'sections.csv')
def add_new_section(section_list, new_section):
    ''' Add a new section to the list if it is not already present. '''
existing_section = False
for section in section_list:
if section.__str__() == new_section.__str__():
existing_section = True
if existing_section == False:
# print('The new section', new_section)
# print('The section list', section_list)
section_list.append(new_section)
return section_list
def plot_weights(time_stamp = None, cog = None, structure = None, weight = None):
if __name__ == '__main__':
cog = [[22.15329254, 12.24742235],
[22.1937807, 12.1975691],
[22.24684556, 12.15423614],
[22.27489223, 12.09378247],
[22.29086617, 12.03458725],
[22.29559601, 11.97667798],
[22.58758899, 11.739118],
[22.34550004, 11.936077],
[22.39332625, 11.96360235],
[22.43016887, 11.99128875],
[22.29320631, 12.02004097],
[22.2458229, 11.99243978],
[22.20984338, 11.96499817]]
cog = np.array(cog)
structure = {'plates': [18.0, 25.0, 12.0, 20.0, 14.0, 30.0, 15.0],
'beams': ['T_400_0x12_0__200_0x20_0', 'T_400_0x12_0__250_0x14_0', 'T_400_0x12_0__250_0x12_0',
'T_400_0x12_0__200_0x18_0', 'T_400_0x12_0__150_0x20_0', 'T_500_0x12_0__150_0x20_0',
'T_340_0x12_0__200_0x20_0', 'T_340_0x12_0__150_0x16_0', 'T_250_0x12_0__150_0x14_0',
'T_450_0x12_0__150_0x20_0', 'T_375_0x12_0__150_0x18_0', 'T_500_0x12_0__150_0x25_0',
'T_325_0x12_0__150_0x16_0', 'FB_250_0x18_0', 'FB_400_0x18_0',
'T_350_0x12_0__150_0x20_0', 'T_320_0x12_0__150_0x20_0', 'T_300_0x12_0__150_0x20_0']}
time_stamp = [18920.477643045164, 18920.477684256162, 18920.477721255855, 18920.477761896746, 18920.477798285963,
18920.477841150896, 18920.4778763735, 18920.477939357952, 18920.47800752034, 18920.47808087777,
18920.478203353003, 18920.478237156338, 18920.47826686926]
weight = [0.97156037, 0.97553128, 0.979408, 0.97625964, 0.97319636, 0.97021818,
1., 0.97518182, 0.97234545, 0.96950909, 0.97546545, 0.97830182,
0.98113818]
import matplotlib.dates as mdate
from matplotlib import pyplot as plt
from matplotlib.gridspec import GridSpec
pl_and_bm = [['', ''] for dummy in range(max(len(list(structure.values())[0]), len(list(structure.values())[1])))]
for key, value in structure.items():
for idx, val in enumerate(value):
if key == 'plates':
pl_and_bm[idx][0] = str(val) + ' mm'
else:
pl_and_bm[idx][1] = val
fig = plt.figure(figsize=(14, 8))
gs = GridSpec(2, 3, figure=fig)
time_stamp = [mdate.epoch2num(val) for val in time_stamp]
ax3 = plt.subplot(gs[1, 0:2])
plt.plot(time_stamp, weight, 'tab:green')
ax1 = plt.subplot(gs[0, 0], sharex=ax3)
plt.plot(time_stamp, cog[:, 0])
ax2 = plt.subplot(gs[0, 1], sharex=ax3)
plt.plot(time_stamp, cog[:, 1], 'tab:orange')
ax4 = plt.subplot(gs[0:2, 2])
ax4.set_axis_off()
table1 = plt.table(cellText=pl_and_bm, colLabels = ['Plates in model', 'Beams in model'],loc='center')
table1.auto_set_column_width((0,1))
table1.scale(1,1.5)
# ax5 = plt.subplot(gs[0:2, 3])
# ax5.set_axis_off()
# table2 = plt.table(cellText=structure['beams'], colLabels = ['Beams in model'],loc='center')
# table2.scale(1,1.5)
# table2.auto_set_column_width(0)
# Choose your xtick format string
date_fmt = '%d-%m-%y %H:%M:%S'
# Use a DateFormatter to set the data to the correct format.
date_formatter = mdate.DateFormatter(date_fmt)
ax1.xaxis.set_major_formatter(date_formatter)
ax2.xaxis.set_major_formatter(date_formatter)
ax3.xaxis.set_major_formatter(date_formatter)
ax1.set_title('COG X')
ax2.set_title('COG Y')
ax3.set_title('Total weight / max(total weight)')
        fig.suptitle('Development of weight and COG')
# Sets the tick labels diagonal so they fit easier.
fig.autofmt_xdate()
plt.tight_layout()
plt.show()
def helper_cylinder_stress_to_force_to_stress(stresses = None, forces = None, geometry = None, shell_t = 0,
shell_radius = 0, shell_spacing = 0,
hw = 0, tw = 0, b = 0, tf = 0, CylinderAndCurvedPlate = None,
conical = False, psd = 0, cone_r1 = 0, cone_r2 = 0, cone_alpha = 0,
shell_lenght_l = 0):
A = 0 if geometry in [1, 2] else hw * tw + b * tf
eq_thk = shell_t if geometry in [1, 2] else shell_t + A/shell_spacing
Itot = CylinderAndCurvedPlate.get_Itot(hw=0 if geometry in [1, 2] else hw,
tw=0 if geometry in [1, 2] else tw,
b=0 if geometry in [1, 2] else b,
tf=0 if geometry in [1, 2] else tf,
r=shell_radius,
s=shell_spacing,
t=shell_t)
if forces is not None and stresses is None:
if not conical:
Nsd, Msd, Tsd, Qsd = forces
sasd = (Nsd / 2) / (math.pi * shell_radius * eq_thk) * 1000
smsd = (Msd/ Itot) * \
(shell_radius + shell_t / 2) * 1000000
tTsd = (Tsd* 10 ** 6) / (2 * math.pi * shell_t * math.pow(shell_radius, 2))
tQsd = Qsd / (math.pi * shell_radius * shell_t) * 1000
shsd = 0
return sasd, smsd, tTsd, tQsd, shsd
else:
Nsd, M1sd, M2sd, Tsd, Q1sd, Q2sd = forces
re = (cone_r1+cone_r2) / (2*math.cos(math.radians(cone_alpha)))
le = shell_lenght_l / math.cos(math.radians(cone_alpha))
te = shell_t *math.cos(math.radians(cone_alpha))
            # Axial stress: pressure contribution p*r/(2*t) plus the axial force contribution.
            sasd = psd*re/(2*te) + Nsd/(2*math.pi*re*te) * 1000
smsd = ((M1sd*math.sin(math.radians(cone_alpha)) / (math.pi*math.pow(re,2)*te)) + \
(M2sd*math.cos(math.radians(cone_alpha)) / (math.pi*math.pow(re,2)*te))) * 1000000
shsd = psd*re/te
tTsd = Tsd/(2*math.pi*math.pow(re,2)*te)
tQsd = -(Q1sd*math.cos(math.radians(cone_alpha)) / (math.pi*re*te)) + \
(Q2sd*math.sin(math.radians(cone_alpha)) / (math.pi*re*te))
return sasd, smsd, tTsd, tQsd, shsd
else:
if not conical:
sasd, smsd, tTsd, tQsd, shsd = stresses
Nsd = (sasd * 2 * math.pi * shell_radius * eq_thk) / 1000
            Msd = (smsd / (shell_radius + shell_t / 2)) * Itot / 1000000
Tsd = tTsd * 2 * math.pi * shell_t * math.pow(shell_radius, 2) / 1000000
Qsd = tQsd * math.pi * shell_radius * shell_t / 1000
else:
re = (cone_r1+cone_r2) / (2*math.cos(math.radians(cone_alpha)))
le = shell_lenght_l / math.cos(math.radians(cone_alpha))
te = shell_t *math.cos(math.radians(cone_alpha))
Itot = CylinderAndCurvedPlate.get_Itot(hw=0,
tw=0 ,
b=0 ,
tf=0,
r=re,
s=shell_spacing,
t=te)
sasd, smsd, tTsd, tQsd, shsd = stresses
Nsd = (sasd * 2 * math.pi * re * te) / 1000
            Msd = (smsd / (re + te / 2)) * Itot / 1000000
Tsd = tTsd * 2 * math.pi * te * math.pow(re, 2) / 1000000
Qsd = tQsd * math.pi * re * te/ 1000
return Nsd, Msd, Tsd, Qsd, shsd
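# Worked sketch for the plain (non-conical) forces -> stresses branch above, with assumed numbers:
# geometry = 1 (unstiffened), shell_t = 0.02 m, shell_radius = 2.5 m and Nsd = 10000 gives
#   sasd = (10000 / 2) / (math.pi * 2.5 * 0.02) * 1000 ~ 3.18e7
# which corresponds to roughly 31.8 MPa if Nsd is given in kN.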
if __name__ == '__main__':
from tkinter import *
class AllTkinterWidgets:
def __init__(self, master):
frame = Frame(master, width=500, height=400, bd=1)
frame.pack()
iframe5 = Frame(frame, bd=2, relief=RAISED)
iframe5.pack(expand=1, fill=X, pady=10, padx=5)
c = Canvas(iframe5, bg='white', width=340, height=200)
c.pack()
height = 150
radius = 150
offset_oval = 30
start_x_cyl = 150
start_y_cyl = 20
coord1 = start_x_cyl, start_y_cyl, start_x_cyl + radius, offset_oval
coord2 = start_x_cyl, start_y_cyl + height, start_x_cyl + radius, offset_oval+ height
arc_1 = c.create_oval(coord1, width = 5, fill = 'grey90')
arc_2 = c.create_arc(coord2, extent = 180, start = 180,style=ARC, width = 3)
line1 = c.create_line(coord1[0], coord1[1]+offset_oval/4,
coord1[0], coord1[1]+height+offset_oval/4,
width = 3)
line2 = c.create_line(coord1[0]+radius, coord1[1]+offset_oval/4,
coord1[0]+radius, coord1[1]+height+offset_oval/4,
width = 3)
num_stf = 10
for line_num in range(1,num_stf,1):
angle = 180 - 180/(num_stf) *line_num
arc_x, arc_y = 1*math.cos(math.radians(angle)), 0.5*math.sin(math.radians(angle))
arc_x = (arc_x + 1)/2
line1 = c.create_line(coord1[0] + radius*arc_x,
coord1[1] +2*arc_y*offset_oval/3,
coord1[0] + radius*arc_x,
coord1[1] + height +2*arc_y*offset_oval/3,fill = 'blue')
num_ring_stiff = 5
for ring_stf in range(1,num_ring_stiff+1,1):
coord3 = coord1[0], coord1[1]+(height/(num_ring_stiff+1))*ring_stf, \
start_x_cyl +radius, coord1[3]+ (height/(num_ring_stiff+1))*ring_stf,
arc_2 = c.create_arc(coord3, extent=180, start=180, style=ARC, width=2,fill = 'orange', outline = 'orange')
num_ring_girder = 1
for ring_girder in range(1, num_ring_girder+1,1):
coord3 = coord1[0], coord1[1]+(height/(num_ring_girder+1))*ring_girder, \
start_x_cyl+ radius, coord1[3]+ (height/(num_ring_girder+1))*ring_girder,
arc_2 = c.create_arc(coord3, extent=180, start=180, style=ARC, width=4, fill = 'grey', outline = 'grey')
iframe5.pack(expand=1, fill=X, pady=10, padx=5)
root = Tk()
# root.option_add('*font', ('verdana', 10, 'bold'))
all = AllTkinterWidgets(root)
root.title('Tkinter Widgets')
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/helper.py | helper.py |
import numpy as np
import itertools as it
import time
import random
import copy
from multiprocessing import Pool, cpu_count
import math
from math import floor
from matplotlib import pyplot as plt
from tkinter.filedialog import asksaveasfilename
import csv
try:
import any_files.calc_structure as calc
import any_files.helper as hlp
except ModuleNotFoundError:
import ANYstructure.any_files.calc_structure as calc
import ANYstructure.any_files.helper as hlp
def run_optmizataion(initial_structure_obj=None, min_var=None, max_var=None, lateral_pressure=None,
deltas=None, algorithm='anysmart', trials=30000, side='p',
const_chk = (True,True,True,True,True,True, True, False, False, False),
pso_options = (100,0.5,0.5,0.5,100,1e-8,1e-8), is_geometric=False, fatigue_obj = None ,
fat_press_ext_int = None,
min_max_span = (2,6), tot_len = None, frame_height = 2.5, frame_distance = None,
slamming_press = 0, predefined_stiffener_iter = None, processes = None, use_weight_filter = True,
load_pre = False, opt_girder_prop = None, puls_sheet = None, puls_acceptance = 0.87,
fdwn = 1, fup = 0.5, ml_algo = None, cylinder = False):
'''
The optimazation is initiated here. It is called from optimize_window.
:param initial_structure_obj:
:param min_var:
:param max_var:
:param lateral_pressure:
:param deltas:
:param algorithm:
    :param init_weight:
:param pso_options:
:return:
'''
init_filter_weight = float('inf')
if is_geometric:
fat_dict = [None if this_fat is None else this_fat.get_fatigue_properties() for this_fat in fatigue_obj]
else:
fat_dict = None if fatigue_obj is None else fatigue_obj.get_fatigue_properties()
if use_weight_filter and not cylinder:
if is_geometric or algorithm == 'pso':
init_filter_weight = float('inf')
else:
predefined_stiffener_iter = None if predefined_stiffener_iter is None else predefined_stiffener_iter
init_filter_weight = get_initial_weight(obj=initial_structure_obj,
lat_press=lateral_pressure,
min_var=min_var, max_var=max_var, deltas=deltas,
trials= 30000 if predefined_stiffener_iter is None else
len(predefined_stiffener_iter),
fat_dict=fat_dict,
fat_press=None if fat_press_ext_int is None else fat_press_ext_int,
predefined_stiffener_iter = predefined_stiffener_iter,
slamming_press=slamming_press, fdwn = fdwn, fup = fup,
ml_algo = ml_algo)
if cylinder:
to_return = any_smart_loop_cylinder(min_var=min_var, max_var=max_var, deltas=deltas,
initial_structure_obj=initial_structure_obj,
use_weight_filter = use_weight_filter,
predefiened_stiffener_iter=predefined_stiffener_iter)
return to_return
elif algorithm == 'anysmart' and not is_geometric:
to_return = any_smart_loop(min_var, max_var, deltas, initial_structure_obj, lateral_pressure,
init_filter_weight, side=side, const_chk=const_chk, fat_dict=fat_dict,
fat_press=fat_press_ext_int,slamming_press=slamming_press,
predefiened_stiffener_iter=predefined_stiffener_iter, puls_sheet = puls_sheet,
puls_acceptance = puls_acceptance, fdwn = fdwn, fup = fup, ml_algo=ml_algo)
return to_return
elif algorithm == 'anysmart' and is_geometric:
return geometric_summary_search(min_var= min_var, max_var=max_var, deltas= deltas,
initial_structure_obj= initial_structure_obj, lateral_pressure=lateral_pressure,
init_filter= init_filter_weight, side= side, const_chk= const_chk,
fat_obj= fatigue_obj, fat_press= fat_press_ext_int, min_max_span= min_max_span,
tot_len= tot_len, frame_distance = frame_distance,
algorithm= 'anysmart', predefiened_stiffener_iter=predefined_stiffener_iter,
slamming_press = slamming_press, load_pre = load_pre,
opt_girder_prop = opt_girder_prop, ml_algo=ml_algo)
elif algorithm == 'anydetail' and not is_geometric:
return any_optimize_loop(min_var, max_var, deltas, initial_structure_obj, lateral_pressure,init_filter_weight,
side=side, const_chk=const_chk, fat_dict=fat_dict, fat_press=fat_press_ext_int,
slamming_press=slamming_press)
elif algorithm == 'random' and not is_geometric:
return get_random_result(initial_structure_obj,lateral_pressure,min_var,max_var,deltas,trials=trials,
side=side, const_chk=const_chk, fat_dict=fat_dict,fat_press=fat_press_ext_int)
elif algorithm == 'random_no_delta' and not is_geometric:
return get_random_result_no_bounds(initial_structure_obj, lateral_pressure, min_var, max_var, trials=trials,
side=side, const_chk=const_chk)
# elif algorithm == 'pso' and is_geometric:
# return geometric_summary_search(min_var,max_var,deltas, initial_structure_obj,lateral_pressure,
# init_filter_weight,side,const_chk,pso_options,fatigue_obj,fat_press_ext_int,
# min_max_span,tot_len,frame_height,frame_cross_a, 'pso')
else:
return None
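# Dispatch summary for run_optmizataion above: cylinder=True -> any_smart_loop_cylinder,
# 'anysmart' -> any_smart_loop or geometric_summary_search (if is_geometric), 'anydetail' ->
# any_optimize_loop, 'random' -> get_random_result, 'random_no_delta' -> get_random_result_no_bounds.
# Hedged call sketch (the panel object and the min/max/delta arrays are assumptions):
#   results = run_optmizataion(initial_structure_obj=my_panel, min_var=min_np, max_var=max_np,
#                              lateral_pressure=0.2, deltas=delta_np, algorithm='anysmart')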
def any_optimize_loop(min_var,max_var,deltas,initial_structure_obj,lateral_pressure, init_filter = float('inf'),
side='p',const_chk=(True,True,True,True,True,False), fat_dict = None, fat_press = None,
slamming_press = 0):
'''
    Calculating initial values.
:param min:
:param max:
:return:
'''
ass_var = []
plot_x,plot_y = [],[]
plt.xlabel('#')
    plt.ylabel('weight [kg]')
plt.title('ANYdetail brute force results')
plt.grid(True)
plt.draw()
iter_count = 0
min_weight = init_filter
main_fail = list()
for spacing in np.arange(min_var[0],max_var[0]+deltas[0],deltas[0]):
for plate_thk in np.arange(min_var[1],max_var[1]+deltas[1],deltas[1]):
for stf_web_h in np.arange(min_var[2],max_var[2]+deltas[2],deltas[2]):
for stf_web_thk in np.arange(min_var[3],max_var[3]+deltas[3],deltas[3]):
for stf_flange_width in np.arange(min_var[4],max_var[4]+deltas[4],deltas[4]):
for stf_flange_thk in np.arange(min_var[5],max_var[5]+deltas[5],deltas[5]):
var_x = np.array([spacing, plate_thk, stf_web_h, stf_web_thk, stf_flange_width,
stf_flange_thk,min_var[6],min_var[7]])
check = any_constraints_all(var_x,initial_structure_obj,lat_press=lateral_pressure,
init_weight=min_weight,side=side,chk=const_chk,
fat_dict = fat_dict, fat_press = fat_press,
slamming_press=slamming_press)
if check[0] is not False:
current_weight = calc_weight(var_x)
if current_weight <= min_weight:
iter_count+=1
min_weight = current_weight
ass_var = var_x
main_fail.append(check)
else:
main_fail.append(check)
    if len(ass_var) == 0:
return None, None, None, False, main_fail
new_struc_obj = create_new_structure_obj(initial_structure_obj,[item for item in ass_var])
new_calc_obj = create_new_calc_obj(initial_structure_obj,[item for item in ass_var])[0]
return new_struc_obj, new_calc_obj, fat_dict, True, main_fail
def any_smart_loop(min_var,max_var,deltas,initial_structure_obj,lateral_pressure, init_filter = float('inf'),
side='p',const_chk=(True,True,True,True,True,True,True, False, False,False), fat_dict = None,
fat_press = None,
slamming_press = 0, predefiened_stiffener_iter = None, processes = None,
puls_sheet = None, puls_acceptance = 0.87, fdwn = 1, fup = 0.5, ml_algo = None):
'''
Trying to be smart
:param min_var:
:param max_var:
:param initial_structure:
:return:
'''
initial_structure_obj.lat_press = lateral_pressure
if predefiened_stiffener_iter is None:
structure_to_check = any_get_all_combs(min_var, max_var, deltas)
else:
structure_to_check = any_get_all_combs(min_var, max_var, deltas,predef_stiffeners=[item.get_tuple() for item
in predefiened_stiffener_iter])
main_result = get_filtered_results(structure_to_check, initial_structure_obj,lateral_pressure,
init_filter_weight=init_filter, side=side,chk=const_chk, fat_dict=fat_dict,
fat_press=fat_press, slamming_press=slamming_press, processes=processes,
puls_sheet = puls_sheet, puls_acceptance = puls_acceptance, ml_algo=ml_algo)
main_iter = main_result[0]
main_fail = main_result[1]
ass_var=None
current_weight = float('inf')
for item in main_iter:
main_fail.append(item)
item_weight = calc_weight(item[2])
if item_weight < current_weight:
ass_var = item[2]
current_weight = item_weight
    if ass_var is None:
return None, None, None, False, main_fail
if len(ass_var) == 8:
ass_var = [round(item, 10) for item in ass_var[0:8]]
else:
ass_var = [round(item, 10) for item in ass_var[0:8]] + [ass_var[8]]
# initial_structure_obj.Plate = create_new_structure_obj(initial_structure_obj.Plate, ass_var,
# fdwn = fdwn, fup = fup)
# initial_structure_obj.Stiffener = create_new_structure_obj(initial_structure_obj.Stiffener, ass_var,
# fdwn=fdwn, fup=fup)
calc_object_stf = None if initial_structure_obj.Stiffener is None \
else create_new_calc_obj(initial_structure_obj.Stiffener, ass_var,
fat_dict, fdwn=fdwn, fup=fup)
calc_object_pl = create_new_calc_obj(initial_structure_obj.Plate, ass_var, fat_dict,
fdwn=fdwn, fup=fup)
calc_object = calc.AllStructure(Plate=calc_object_pl[0],
Stiffener=None if initial_structure_obj.Stiffener is None else calc_object_stf[0],
Girder=None,
main_dict=initial_structure_obj.get_main_properties()['main dict'])
calc_object.lat_press = lateral_pressure
return calc_object, fat_dict, True, main_fail
def any_smart_loop_cylinder(min_var,max_var,deltas,initial_structure_obj,lateral_pressure = None,
init_filter = float('inf'),
side='p',const_chk=(True,True,True,True,True,True,True, False, False,False), fat_dict = None,
fat_press = None, slamming_press = 0, predefiened_stiffener_iter = None, processes = None,
fdwn = 1, fup = 0.5, ml_algo = None, use_weight_filter = True):
combs = list()
    # TODO: first optimize the longitudinal components, then the ring components. Find the overall smallest weight.
# Creating the individual combinations for Shell, LongStf, RingStf and RingFrame
for idx, str_type in enumerate(range(len(min_var))):
if sum(min_var[idx]) == 0:
structure_to_check = [(0, 0, 0, 0, 0, 0, 0, 0),]
else:
if any([predefiened_stiffener_iter is None, idx == 0]):
initial_structure_obj.LongStfObj.stiffener_type = 'T'
structure_to_check = any_get_all_combs(min_var[idx], max_var[idx], deltas[idx])
else:
structure_to_check = any_get_all_combs(min_var[idx], max_var[idx], deltas[idx],
predef_stiffeners= [item.get_tuple() for item in
predefiened_stiffener_iter])
            # TODO: add stiffener type
# [list(item).append('T') for item in structure_to_check]
# [tuple(item) for item in structure_to_check]
combs.append(structure_to_check)
# Combining the individual components.
final_comb, iter_vals = list(), list()
for shell in combs[0]:
for long in combs[1]:
for ring_stf in combs[2]:
for ring_frame in combs[3]:
final_comb.append([[shell, long, ring_stf, ring_frame], initial_structure_obj])
# print('All combs', len(combs[0]),len(combs[1]),len(combs[2]),len(combs[3]),len(final_comb))
# quit()
# Weight filter
min_weight = float('inf')
if use_weight_filter:
to_check = [random.choice(final_comb) + [float('inf')] for dummy in range(10000)]
with Pool(processes=max(cpu_count() - 1, 1)) as my_process:
res_pre = my_process.starmap(any_constraints_cylinder, to_check)
for chk_res in res_pre :
if chk_res[0]:
current_weight = calc_weight_cylinder(chk_res[2])
if current_weight < min_weight:
min_weight = current_weight
else:
min_weight = False
final_comb_inc_weight = list()
for val in final_comb:
final_comb_inc_weight.append(val + [min_weight])
t1 = time.time()
with Pool(processes = max(cpu_count()-1,1)) as my_process:
res_pre = my_process.starmap(any_constraints_cylinder, final_comb_inc_weight)
check_ok, check_not_ok = list(), list()
for item in res_pre:
if item[0] is False:
check_not_ok.append(item)
else:
check_ok.append(item)
main_iter = check_ok
main_fail = check_not_ok
ass_var = None
current_weight = float('inf')
for item in main_iter:
main_fail.append(item)
item_weight = calc_weight_cylinder(item[2])
if item_weight < current_weight:
ass_var = item[2]
current_weight = item_weight
    if ass_var is None:
return None, None, None, False, main_fail
new_cylinder_obj = create_new_cylinder_obj(initial_structure_obj, ass_var)
# Checking ring stiffeners and frames
#return new_struc_obj, new_calc_obj, fat_dict, True, main_fail
return new_cylinder_obj, main_fail
def any_smart_loop_geometric(min_var,max_var,deltas,initial_structure_obj,lateral_pressure, init_filter = float('inf'),
side='p',const_chk=(True,True,True,True,True,True), fat_obj = None, fat_press = None,
slamming_press = None, predefiened_stiffener_iter=None, processes = None, ml_algo = None):
''' Searching multiple sections using the smart loop. '''
all_obj = []
idx = 0
for struc_obj, lat_press, fatigue_obj, fatigue_press, slam_press in zip(initial_structure_obj, lateral_pressure,
fat_obj, fat_press, slamming_press):
#print(predefiened_stiffener_iter)
if predefiened_stiffener_iter is not None:
this_predefiened_objects = hlp.helper_read_section_file(predefiened_stiffener_iter, struc_obj)
else:
this_predefiened_objects = None
opt_obj = any_smart_loop(min_var = min_var,max_var = max_var,deltas = deltas,initial_structure_obj = struc_obj,
lateral_pressure = lat_press, init_filter = init_filter, side=side,
const_chk=const_chk,
fat_dict = None if fatigue_obj is None else fatigue_obj.get_fatigue_properties(),
fat_press = None if fatigue_press is None else fatigue_press,
slamming_press = 0 if slam_press is None else slam_press,
predefiened_stiffener_iter=this_predefiened_objects, processes=processes,
ml_algo=ml_algo)
all_obj.append(opt_obj)
idx += 1
return all_obj
def geometric_summary_search(min_var=None,max_var=None,deltas = None, initial_structure_obj=None,lateral_pressure=None,
init_filter = float('inf'),side='p',const_chk=(True,True,True,True, True, True),
pso_options=(100,0.5,0.5,0.5,100,1e-8,1e-8), fat_obj = None, fat_press = None,
min_max_span = (2,6), tot_len = None, frame_distance = None,
algorithm = 'anysmart', predefiened_stiffener_iter=None, reiterate = True,
processes = None, slamming_press = None, load_pre = False, opt_girder_prop = None,
ml_algo = None):
'''Geometric optimization of all relevant sections. '''
    # Checking the number of initial objects and adding more if the number of fractions is to be changed.
# print('Min/max span is', min_max_span)
found_max, found_min = False, False
for frames in range(1,100):
frame_count = frames
if tot_len/frames <= min_max_span[1] and found_min is False:
min_frame_count = frame_count - 1
found_min = True
if tot_len/frames <= min_max_span[0] and found_max is False:
max_frame_count = frame_count - 1
found_max = True
if found_min and found_max:
break
results = {}
# print('Frame count min/max: ', min_frame_count, max_frame_count)
# print('Initial objects: ', [print(type(obj)) for obj in initial_structure_obj])
# print('Initial lateral: ', lateral_pressure)
working_objects = {}
working_lateral = {}
working_fatigue = {}
working_fatigue_press = {}
working_slamming = {}
for no_of_fractions in range(min_frame_count+1, max_frame_count+1):
        # Create fraction variables
frac_var,min_frac,max_frac = [], [], []
for var in range(no_of_fractions):
            # Frame height is an interpolation between the height at the start and at the end.
frac_var.append(1/no_of_fractions)
working_objects[no_of_fractions] = list(initial_structure_obj)
working_lateral[no_of_fractions] = list(lateral_pressure)
working_fatigue[no_of_fractions] = list(fat_obj)
working_fatigue_press[no_of_fractions] = list(fat_press)
working_slamming[no_of_fractions] = list(slamming_press)
similar_count = len(working_objects[no_of_fractions])
tick_tock = True
while similar_count != no_of_fractions*2:
if similar_count > no_of_fractions*2:
for var_dict in [working_objects, working_lateral, working_fatigue,
working_fatigue_press, working_slamming]:
if tick_tock:
lower_idx = 0
upper_idx = int(floor(len(working_objects[no_of_fractions]) / 2))
tick_tock = False
else:
lower_idx = int(len(working_objects[no_of_fractions]) / 2) - 1
upper_idx = -1
tick_tock = True
var_dict[no_of_fractions].pop(lower_idx)
var_dict[no_of_fractions].pop(upper_idx)
similar_count -= 2
else:
if tick_tock:
lower_idx = 0
upper_idx = int(len(working_objects[no_of_fractions])/2)
tick_tock = False
else:
lower_idx = int(len(working_objects[no_of_fractions])/2) - 1
upper_idx = -1
tick_tock = True
#print(no_of_fractions, int(ceil(len(working_objects[no_of_fractions])/2)))
obj_start, obj_stop = copy.deepcopy(working_objects[no_of_fractions][lower_idx]),\
copy.deepcopy(working_objects[no_of_fractions][upper_idx])
fat_obj_start, fat_obj_stop = copy.deepcopy(working_fatigue[no_of_fractions][lower_idx]), \
copy.deepcopy(working_fatigue[no_of_fractions][upper_idx])
lat_start, lat_stop = working_lateral[no_of_fractions][lower_idx], \
working_lateral[no_of_fractions][upper_idx]
fat_press_start, fat_press_stop = working_fatigue_press[no_of_fractions][lower_idx], \
working_fatigue_press[no_of_fractions][upper_idx]
slam_start, slam_stop = working_slamming[no_of_fractions][lower_idx], \
working_slamming[no_of_fractions][upper_idx]
# if no_of_fractions == 11:
# print('Tick/tock', tick_tock, 'lower/opper idx', lower_idx, upper_idx)
for work, work_input in zip([working_objects[no_of_fractions], working_lateral[no_of_fractions],
working_fatigue[no_of_fractions],
working_fatigue_press[no_of_fractions],
working_slamming[no_of_fractions]],
[(obj_start, obj_stop), (lat_start, lat_stop),
(fat_obj_start, fat_obj_stop), (fat_press_start, fat_press_stop),
(slam_start, slam_stop)]):
# First iteration tick_tock true, second tick_tock false
if not tick_tock:
lower_idx = lower_idx
upper_idx = upper_idx + 1
else:
lower_idx = lower_idx + 1
upper_idx = -1
work.insert(lower_idx, work_input[0])
work.insert(upper_idx, work_input[1])
similar_count += 2
# if no_of_fractions == 11:
# [print(item.get_structure_type()) for item in working_objects[no_of_fractions]]
# print('')
for no_of_fractions, struc_objects in working_objects.items():
for struc_obj in struc_objects:
struc_obj.Plate.set_span(tot_len/no_of_fractions)
struc_obj.Stiffener.set_span(tot_len / no_of_fractions)
solution_found, iterations = False, 0
while not solution_found:
iterations += 1
if iterations != 1:
min_var[0:6] += deltas/2
max_var[0:6] -= deltas/2
if algorithm == 'anysmart':
if load_pre:
import pickle
with open('geo_opt_2.pickle', 'rb') as file:
opt_objects = pickle.load(file)[no_of_fractions][1]
else:
opt_objects = any_smart_loop_geometric(min_var=min_var,max_var=max_var,deltas=deltas,
initial_structure_obj=working_objects[no_of_fractions],
lateral_pressure=working_lateral[no_of_fractions],
init_filter = init_filter,side=side,const_chk=const_chk,
fat_obj = working_fatigue[no_of_fractions],
slamming_press = working_slamming[no_of_fractions],
fat_press=working_fatigue_press[no_of_fractions],
predefiened_stiffener_iter = predefiened_stiffener_iter,
ml_algo=ml_algo)
# Finding weight of this solution.
tot_weight, frame_spacings, valid, width, weight_details = 0, [None for dummy in range(len(opt_objects))], \
True, 10, {'frames': list(), 'objects': list(),
'scales': list()}
#print('Weight for', no_of_fractions)
for count, opt in enumerate(opt_objects):
obj = opt[0]
if opt[3]:
weigth_to_add = calc_weight((obj.Plate.get_s(),obj.Plate.get_pl_thk(),obj.Stiffener.get_web_h(),
obj.Stiffener.get_web_thk(),
obj.Stiffener.get_fl_w(),obj.Stiffener.get_fl_thk(),
obj.Plate.get_span(),width), prt=False)
tot_weight += weigth_to_add
weight_details['objects'].append(weigth_to_add)
if frame_spacings[count // 2] is None:
frame_spacings[count // 2] = obj.Plate.get_s()
#print('added normal weight', weigth_to_add)
else:
# In this case there are no applicable solutions found in the specified dimension ranges.
tot_weight += float('inf')
valid = False
if valid:
#print(frame_distance)
for frame in range(no_of_fractions-1):
frame_height = 2.5 if frame_distance is None else frame_distance['start_dist'] + \
(frame_distance['stop_dist']-
frame_distance['start_dist']) * \
((frame+1)/no_of_fractions)
#pl_area, stf_area = 0.018 * width, 0.25 * 0.015 * (width//frame_spacings[frame])
this_x = (frame_spacings[frame], opt_girder_prop[0], opt_girder_prop[1], opt_girder_prop[2],
opt_girder_prop[3], opt_girder_prop[4], None, width)
this_weight = sum(get_field_tot_area(this_x))* frame_height * 7850
scale_max, scale_min = opt_girder_prop[5], opt_girder_prop[6]
this_scale = scale_min + (scale_max-scale_min) * (abs((max_frame_count-(count+1)/2))/
(max_frame_count-min_frame_count))
#print('Number of fractions', no_of_fractions, 'Scale', this_scale)
tot_weight += this_weight * this_scale
solution_found = True
#print('added frame weight', this_weight * this_scale)
weight_details['frames'].append(this_weight * this_scale)
weight_details['scales'].append(this_scale)
elif iterations == 2:
                solution_found = True  # Stop after two iterations.
if predefiened_stiffener_iter is not None or not reiterate:
                solution_found = True  # No solution may have been found, but do not iterate further in this case.
results[no_of_fractions] = tot_weight, opt_objects, weight_details
# for key, val in results.items():
# print(key)
# print(val)
return results
def any_find_min_weight_var(var):
'''
    Find the minimum weight of the input variables.
:param min:
:param max:
:return:
'''
    return min(map(calc_weight, var))
def any_constraints_cylinder(x,obj: calc.CylinderAndCurvedPlate,init_weight, lat_press = None,side='p',
chk=(True,True,True,True, True, True, True, False, False, False),
fat_dict = None, fat_press = None, slamming_press = 0,fdwn = 1, fup = 0.5,
ml_results = None):
'''
Checking all constraints defined.
iter_var = ((item,init_stuc_obj,lat_press,init_filter_weight,side,chk,fat_dict,fat_press,slamming_press, PULSrun)
for item in iterable_all)
:param x:
:return:
'''
all_checks = [0,0,0,0,0,0,0,0]
check_map = {'weight': 0, 'UF unstiffened': 1, 'Column stability': 2, 'UF longitudinal stiffeners':3,
'Stiffener check': 4, 'UF ring stiffeners':5, 'UF ring frame': 6, 'Check OK': 7}
calc_obj = create_new_cylinder_obj(obj, x)
optimizing = True if any([calc_obj.RingStfObj is None, calc_obj.RingFrameObj is None]) else False
    # Weight
if init_weight != False:
this_weight = calc_weight_cylinder(x)
if this_weight > init_weight:
results = calc_obj.get_utilization_factors(optimizing=optimizing, empty_result_dict = True)
results['Weight'] = this_weight
all_checks[0] += 1
return False, 'Weight filter', x, all_checks, calc_obj
if chk[0]:
results = calc_obj.get_utilization_factors(optimizing = optimizing)
if results[0]:
all_checks[check_map[results[1]]] += 1
return True, results[1], x, all_checks, calc_obj
else:
all_checks[check_map[results[1]]] += 1
return False, results[1], x, all_checks, calc_obj
def any_constraints_all(x,obj,lat_press,init_weight,side='p',chk=(True,True,True,True, True, True, True, False,
False, False),
fat_dict = None, fat_press = None, slamming_press = 0, PULSrun: calc.PULSpanel = None,
print_result = False, fdwn = 1, fup = 0.5, ml_results = None, random_result_return = False):
'''
Checking all constraints defined.
iter_var = ((item,init_stuc_obj,lat_press,init_filter_weight,side,chk,fat_dict,fat_press,slamming_press, PULSrun)
for item in iterable_all)
:param x:
:return:
'''
if random_result_return:
# Skip all calculations
if random.choice([True,False,False,False,False,False,False]):
return True, 'Check OK', x, [0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5]
else:
return False, 'Random result', x, [1.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5]
all_checks = [0,0,0,0,0,0,0,0,0,0,0]
print_result = False
calc_object_stf = None if obj.Stiffener is None else create_new_calc_obj(obj.Stiffener, x, fat_dict, fdwn = fdwn, fup = fup)
calc_object_pl = create_new_calc_obj(obj.Plate, x, fat_dict, fdwn=fdwn, fup=fup)
calc_object = [calc.AllStructure(Plate=calc_object_pl[0],
Stiffener=None if obj.Stiffener is None else calc_object_stf[0], Girder=None,
main_dict=obj.get_main_properties()['main dict']), calc_object_pl[1]]
calc_object[0].lat_press = lat_press
# PULS buckling check
if chk[7] and PULSrun is not None:
x_id = x_to_string(x)
if calc_object[0].Plate.get_puls_method() == 'buckling':
puls_uf = PULSrun.get_puls_line_results(x_id)["Buckling strength"]["Actual usage Factor"][0]
elif calc_object[0].Plate.get_puls_method() == 'ultimate':
puls_uf = PULSrun.get_puls_line_results(x_id)["Ultimate capacity"]["Actual usage Factor"][0]
if type(puls_uf) == str or puls_uf is None:
return False, 'PULS', x, all_checks
all_checks[8] = puls_uf/PULSrun.puls_acceptance
if puls_uf/PULSrun.puls_acceptance >= 1:
if print_result:
print('PULS', calc_object[0].get_one_line_string(), False)
return False, 'PULS', x, all_checks
# Buckling ml-cl
if chk[8]:
if any([calc_object[0].Plate.get_puls_method() == 'buckling' and ml_results[0] != 9,
calc_object[0].Plate.get_puls_method() == 'ultimate' and ml_results[1] != 9]):
if print_result:
print('Buckling ML-CL', calc_object[0].Stiffener.get_one_line_string(), False)
return False, 'Buckling ML-CL', x, all_checks
# Buckling ml-reg
if chk[9]:
pass
this_weight = calc_weight(x)
if this_weight > init_weight:
weigt_frac = this_weight / init_weight
if print_result:
pass
# print('Weights', calc_weight(x), ' > ', init_weight,
# calc_object[0].get_one_line_string(), init_weight, False)
all_checks[0] = weigt_frac
return False, 'Weight filter', x, all_checks
# Section modulus
if chk[0] and calc_object[0].Stiffener is not None:
section_modulus = min(calc_object[0].Stiffener.get_section_modulus())
min_section_modulus = calc_object[0].Stiffener.get_dnv_min_section_modulus(lat_press*1000)
section_frac = section_modulus / min_section_modulus
#print(section_modulus, min_section_modulus, section_frac, lat_press)
all_checks[1] = section_frac
if not section_modulus > min_section_modulus :
if print_result:
print('Section modulus',calc_object[0].get_one_line_string(), False)
return False, 'Section modulus', x, all_checks
# Local stiffener buckling
if chk[6] and calc_object[0].Stiffener is not None:
buckling_local = calc_object[0].local_buckling(optimizing=True)
check = all([buckling_local['Stiffener'][0] < calc_object[0].Stiffener.hw,
buckling_local['Stiffener'][1] < calc_object[0].Stiffener.b])
all_checks[2] = max([0 if buckling_local['Stiffener'][0] == 0 else
calc_object[0].Stiffener.hw/buckling_local['Stiffener'][0],
0 if buckling_local['Stiffener'][1] == 0 else
calc_object[0].Stiffener.b/buckling_local['Stiffener'][1]])
if not check:
if print_result:
print('Local stiffener buckling',calc_object[0].get_one_line_string(), False)
return False, 'Local stiffener buckling', x, all_checks
# Buckling
if chk[3]:
'''
{'Plate': {'Plate buckling': up_buckling}, 'Stiffener': {'Overpressure plate side': stf_buckling_pl_side,
'Overpressure stiffener side': stf_buckling_stf_side,
'Resistance between stiffeners': stf_plate_resistance,
'Shear capacity': stf_shear_capacity},
'Girder': {'Overpressure plate side': girder_buckling_pl_side,
'Overpressure girder side': girder_buckling_girder_side,
'Shear capacity': girder_shear_capacity},
'Local buckling': local_buckling}
'''
buckling_results = calc_object[0].plate_buckling(optimizing=True)
res = [buckling_results['Plate']['Plate buckling'],]
for val in buckling_results['Stiffener'].values():
res.append(val)
# for val in buckling_results['Girder'].values():
# res.append(val)
buckling_results = res
# print(buckling_results)
all_checks[3] = max(buckling_results)
if not all([uf<=1 for uf in buckling_results]):
if print_result:
print('Buckling',calc_object[0].get_one_line_string(), False)
return False, 'Buckling', x, all_checks
# Minimum plate thickness
if chk[1]:
act_pl_thk = calc_object[0].Plate.get_pl_thk()
min_pl_thk = calc_object[0].Plate.get_dnv_min_thickness(lat_press*1000)/1000
plate_frac = min_pl_thk / act_pl_thk
all_checks[4] = plate_frac
if not act_pl_thk > min_pl_thk:
if print_result:
                print('Minimum plate thickness',calc_object[0].get_one_line_string(), False)
return False, 'Minimum plate thickness', x, all_checks
# Shear area
if chk[2]:
pass
# calc_shear_area = calc_object[0].Stiffener.get_shear_area()
# min_shear_area = calc_object[0].Stiffener.get_minimum_shear_area(lat_press)
# shear_frac = min_shear_area / calc_shear_area
# all_checks[5] = shear_frac
# if not calc_shear_area > min_shear_area:
# if print_result:
# print('Shear area',calc_object[0].Stiffener.get_one_line_string(), False)
# return False, 'Shear area', x, all_checks
# Fatigue
if chk[4] and fat_dict is not None and fat_press is not None:
fatigue_uf = calc_object[1].get_total_damage(ext_press=fat_press[0],
int_press=fat_press[1])*calc_object[1].get_dff()
all_checks[6] = fatigue_uf
if fatigue_uf > 1:
if print_result:
print('Fatigue',calc_object[0].Stiffener.get_one_line_string(), False)
return False, 'Fatigue', x, all_checks
# Slamming
if chk[5] and slamming_press != 0 and calc_object[0].Stiffener is not None:
slam_check = calc_object[0].Stiffener.check_all_slamming(slamming_press)
all_checks[7] = slam_check[1]
if slam_check[0] is False:
if print_result:
print('Slamming',calc_object[0].Stiffener.get_one_line_string(), False)
return False, 'Slamming', x, all_checks
if print_result:
print('OK Section', calc_object[0].Stiffener.get_one_line_string(), True)
return True, 'Check OK', x, all_checks
def constraint_geometric(fractions, *args):
return sum(fractions) == 1
def pso_constraint_geometric(x,*args):
''' The sum of the fractions must be 1.'''
return 1-sum(x)
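# Quick illustration of the PSO constraint above (made-up fractions): for x = (0.25, 0.25, 0.5)
# it returns 1 - sum(x) = 0.0. In pyswarm-style PSO solvers a constraint is treated as satisfied
# when the returned value is >= 0; which solver convention applies here is an assumption.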
def create_new_cylinder_obj(init_obj, x_new):
'''
shell (0.02, 2.5, 5, 5, 10, nan, nan, nan),
long (0.875, nan, 0.3, 0.01, 0.1, 0.01, nan, nan),
ring (nan, nan, 0.3, 0.01, 0.1, 0.01, nan, nan),
ring (nan, nan, 0.7, 0.02, 0.2, 0.02, nan, nan)]
'''
stress_press = [init_obj.sasd, init_obj.smsd, init_obj.tTsd, init_obj.tQsd, init_obj.shsd]
shell_obj = init_obj.ShellObj
long_obj = init_obj.LongStfObj
'''
t1, r1, s1, hw1, tw1, b1, tf1 = x1
t1, r1, s2, hw2, tw2, b2, tf2 = x2
'''
x_old = shell_obj.thk, shell_obj.radius, \
init_obj.panel_spacing if long_obj is None else long_obj.s/1000, \
0 if long_obj is None else long_obj.hw/1000, \
0 if long_obj is None else long_obj.tw/1000,\
0 if long_obj is None else long_obj.b/1000,\
0 if long_obj is None else long_obj.tf/1000,
x_new_stress_scaling = x_new[0][0] if not np.isnan(x_new[0][0]) else shell_obj.thk, \
x_new[0][1] if not np.isnan(x_new[0][1]) else shell_obj.radius,\
x_new[0][5] if long_obj is None else x_new[1][0], \
0 if long_obj is None else x_new[1][2], \
0 if long_obj is None else x_new[1][3],\
0 if long_obj is None else x_new[1][4],\
0 if long_obj is None else x_new[1][5]
new_stresses = stress_scaling_cylinder(x_old, x_new_stress_scaling, stress_press)
new_obj = copy.deepcopy(init_obj)
new_obj.sasd, new_obj.smsd, new_obj.tTsd, new_obj.tQsd, new_obj.shsd = new_stresses
new_obj.ShellObj.radius = x_new[0][1]
new_obj.ShellObj.thk = x_new[0][0]
if long_obj is None:
new_obj.panel_spacing = x_new[0][5]
else:
new_obj.LongStfObj.s = x_new[1][0]*1000
new_obj.LongStfObj.hw = x_new[1][2]*1000
new_obj.LongStfObj.tw = x_new[1][3]*1000
new_obj.LongStfObj.b = x_new[1][4]*1000
new_obj.LongStfObj.tf = x_new[1][5]*1000
#new_obj.LongStfObj.stiffener_type = x_new[1][7] # TODO should be 8
return new_obj
def create_new_calc_obj(init_obj,x, fat_dict=None, fdwn = 1, fup = 0.5):
'''
Returns a new calculation object to be used in optimization
:param init_obj:
:return:
'''
if type(init_obj) == calc.AllStructure:
if init_obj.Stiffener is not None:
plate = init_obj.Plate
stiffener = init_obj.Stiffener
girder = init_obj.Girder
x_old = (plate.get_s(), plate.get_pl_thk(), stiffener.get_web_h(), stiffener.get_web_thk(),
stiffener.get_fl_w(),
stiffener.get_fl_thk(), plate.get_span(), stiffener.get_lg() if girder is None else
girder.get_lg(), stiffener.stiffener_type)
else:
x_old = init_obj.Plate.get_tuple()
sigma_y1_new = stress_scaling(init_obj.Plate.get_sigma_y1(), init_obj.Plate.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
sigma_y2_new = stress_scaling(init_obj.Plate.get_sigma_y2(), init_obj.Plate.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
tau_xy_new = stress_scaling(init_obj.Plate.get_tau_xy(), init_obj.Plate.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
sigma_x1_new = stress_scaling_area(init_obj.Plate.get_sigma_x1(),
sum(get_field_tot_area(x_old)),
sum(get_field_tot_area(x)), fdwn = fdwn, fup = fup)
sigma_x2_new = stress_scaling_area(init_obj.Plate.get_sigma_x2(),
sum(get_field_tot_area(x_old)),
sum(get_field_tot_area(x)), fdwn = fdwn, fup = fup)
try:
stf_type = x[8]
except IndexError:
            stf_type = init_obj.Plate.get_stiffener_type()
main_dict = {'mat_yield': [init_obj.Plate.get_fy(), 'Pa'],'mat_factor': [init_obj.Plate.get_mat_factor(), 'Pa'],
'span': [init_obj.Plate.get_span(), 'm'],
'spacing': [x[0], 'm'],'plate_thk': [x[1], 'm'],'stf_web_height':[ x[2], 'm'],
'stf_web_thk': [x[3], 'm'],'stf_flange_width': [x[4], 'm'],
'stf_flange_thk': [x[5], 'm'],'structure_type': [init_obj.Plate.get_structure_type(), ''],
'stf_type': [stf_type, ''],'sigma_y1': [sigma_y1_new, 'MPa'],
'sigma_y2': [sigma_y2_new, 'MPa'],'sigma_x1': [sigma_x1_new, 'MPa'],'sigma_x2': [sigma_x2_new, 'MPa'],
'tau_xy': [tau_xy_new, 'MPa'],'plate_kpp': [init_obj.Plate.get_kpp(), ''],
'stf_kps': [init_obj.Plate.get_kps(), ''],'stf_km1': [init_obj.Plate.get_km1(), ''],
'stf_km2': [init_obj.Plate.get_km2(), ''],'stf_km3': [init_obj.Plate.get_km3(), ''],
'structure_types':[init_obj.Plate.get_structure_types(), ''],
'zstar_optimization': [init_obj.Plate.get_z_opt(), ''],
'puls buckling method':[init_obj.Plate.get_puls_method(),''],
'puls boundary':[init_obj.Plate.get_puls_boundary(),''],
'puls stiffener end':[init_obj.Plate.get_puls_stf_end(),''],
'puls sp or up':[init_obj.Plate.get_puls_sp_or_up(),''],
'puls up boundary':[init_obj.Plate.get_puls_up_boundary(),''],
'panel or shell': [init_obj.Plate.panel_or_shell, '']}
all_dict = init_obj.get_main_properties()
all_dict['Plate'] = main_dict
all_dict['Stiffener'] = None if init_obj.Stiffener is None else main_dict
all_dict['Girder'] = None if init_obj.Girder is None else main_dict
if fat_dict == None:
return calc.AllStructure(Plate=None if all_dict['Plate'] is None
else calc.CalcScantlings(all_dict['Plate']),
Stiffener=None if all_dict['Stiffener'] is None
else calc.CalcScantlings(all_dict['Stiffener']),
Girder=None if all_dict['Girder'] is None
else calc.CalcScantlings(all_dict['Girder']),
main_dict=all_dict['main dict']), None
else:
return calc.AllStructure(Plate=None if all_dict['Plate'] is None
else calc.CalcScantlings(all_dict['Plate']),
Stiffener=None if all_dict['Stiffener'] is None
else calc.CalcScantlings(all_dict['Stiffener']),
Girder=None if all_dict['Girder'] is None
else calc.CalcScantlings(all_dict['Girder']),
main_dict=all_dict['main dict']), \
calc.CalcFatigue(main_dict, fat_dict)
else:
x_old = [init_obj.get_s(), init_obj.get_pl_thk(), init_obj.get_web_h() , init_obj.get_web_thk(),
init_obj.get_fl_w(),init_obj.get_fl_thk(), init_obj.get_span(), init_obj.get_lg()]
sigma_y1_new = stress_scaling(init_obj.get_sigma_y1(), init_obj.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
sigma_y2_new = stress_scaling(init_obj.get_sigma_y2(), init_obj.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
tau_xy_new = stress_scaling(init_obj.get_tau_xy(), init_obj.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
sigma_x1_new = stress_scaling_area(init_obj.get_sigma_x1(),
sum(get_field_tot_area(x_old)),
sum(get_field_tot_area(x)), fdwn = fdwn, fup = fup)
sigma_x2_new = stress_scaling_area(init_obj.get_sigma_x2(),
sum(get_field_tot_area(x_old)),
sum(get_field_tot_area(x)), fdwn = fdwn, fup = fup)
try:
stf_type = x[8]
except IndexError:
stf_type = init_obj.get_stiffener_type()
main_dict = {'mat_yield': [init_obj.get_fy(), 'Pa'],'mat_factor': [init_obj.get_mat_factor(), 'Pa'],
'span': [init_obj.get_span(), 'm'],
'spacing': [x[0], 'm'],'plate_thk': [x[1], 'm'],'stf_web_height':[ x[2], 'm'],
'stf_web_thk': [x[3], 'm'],'stf_flange_width': [x[4], 'm'],
'stf_flange_thk': [x[5], 'm'],'structure_type': [init_obj.get_structure_type(), ''],
'stf_type': [stf_type, ''],'sigma_y1': [sigma_y1_new, 'MPa'],
'sigma_y2': [sigma_y2_new, 'MPa'],'sigma_x1': [sigma_x1_new, 'MPa'],'sigma_x2': [sigma_x2_new, 'MPa'],
'tau_xy': [tau_xy_new, 'MPa'],'plate_kpp': [init_obj.get_kpp(), ''],
'stf_kps': [init_obj.get_kps(), ''],'stf_km1': [init_obj.get_km1(), ''],
'stf_km2': [init_obj.get_km2(), ''],'stf_km3': [init_obj.get_km3(), ''],
'structure_types':[init_obj.get_structure_types(), ''],
'zstar_optimization': [init_obj.get_z_opt(), ''],
'puls buckling method':[init_obj.get_puls_method(),''],
'puls boundary':[init_obj.get_puls_boundary(),''],
'puls stiffener end':[init_obj.get_puls_stf_end(),''],
'puls sp or up':[init_obj.get_puls_sp_or_up(),''],
'puls up boundary':[init_obj.get_puls_up_boundary(),''],
'panel or shell': [init_obj.panel_or_shell, '']}
if fat_dict == None:
return calc.CalcScantlings(main_dict), None
else:
return calc.CalcScantlings(main_dict), calc.CalcFatigue(main_dict, fat_dict)
def create_new_structure_obj(init_obj, x, fat_dict=None, fdwn = 1, fup = 0.5):
'''
Returns a new calculation object to be used in optimization
:param init_obj:
:return:
'''
x_old = [init_obj.get_s(), init_obj.get_pl_thk(), init_obj.get_web_h() , init_obj.get_web_thk(),
init_obj.get_fl_w() ,init_obj.get_fl_thk(), init_obj.get_span(), init_obj.get_lg()]
sigma_y1_new = stress_scaling(init_obj.get_sigma_y1(), init_obj.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
sigma_y2_new = stress_scaling(init_obj.get_sigma_y2(), init_obj.get_pl_thk(), x[1], fdwn = fdwn, fup = fup)
tau_xy_new = stress_scaling(init_obj.get_tau_xy(), init_obj.get_pl_thk(), x[1],fdwn = fdwn, fup = fup)
sigma_x1_new = stress_scaling_area(init_obj.get_sigma_x1(),sum(get_field_tot_area(x_old)),sum(get_field_tot_area(x)),
fdwn = fdwn, fup = fup)
sigma_x2_new = stress_scaling_area(init_obj.get_sigma_x2(),sum(get_field_tot_area(x_old)),sum(get_field_tot_area(x)),
fdwn = fdwn, fup = fup)
try:
stf_type = x[8]
except IndexError:
stf_type = init_obj.get_stiffener_type()
main_dict = {'mat_yield': [init_obj.get_fy(), 'Pa'], 'span': [init_obj.get_span(), 'm'],
'mat_factor': [init_obj.get_mat_factor(), 'Pa'],
'spacing': [x[0], 'm'], 'plate_thk': [x[1], 'm'], 'stf_web_height': [x[2], 'm'],
'stf_web_thk': [x[3], 'm'], 'stf_flange_width': [x[4], 'm'],
'stf_flange_thk': [x[5], 'm'], 'structure_type': [init_obj.get_structure_type(), ''],
'stf_type': [stf_type, ''], 'sigma_y1': [sigma_y1_new, 'MPa'],
'sigma_y2': [sigma_y2_new, 'MPa'], 'sigma_x1': [sigma_x1_new, 'MPa'],'sigma_x2': [sigma_x2_new, 'MPa'],
'tau_xy': [tau_xy_new, 'MPa'], 'plate_kpp': [init_obj.get_kpp(), ''],
'stf_kps': [init_obj.get_kps(), ''], 'stf_km1': [init_obj.get_km1(), ''],
'stf_km2': [init_obj.get_km2(), ''], 'stf_km3': [init_obj.get_km3(), ''],
'structure_types': [init_obj.get_structure_types(), ''],
'zstar_optimization': [init_obj.get_z_opt(), ''],
'puls buckling method': [init_obj.get_puls_method(), ''],
'puls boundary': [init_obj.get_puls_boundary(), ''],
'puls stiffener end': [init_obj.get_puls_stf_end(), ''],
'puls sp or up': [init_obj.get_puls_sp_or_up(), ''],
'puls up boundary': [init_obj.get_puls_up_boundary(), ''],
}
#if fat_dict == None:
return calc.Structure(main_dict)
def get_field_tot_area(x):
''' Total area of a plate field. '''
if len(x) == 6:
width = 10
else:
width = x[7]
plate_area = width*x[1]
stiff_area = (x[2] * x[3]+ x[4] * x[5]) * (width//x[0])
return plate_area, stiff_area
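# Worked example for get_field_tot_area (illustrative numbers): with
# x = (0.7, 0.012, 0.3, 0.012, 0.1, 0.016) the default field width of 10 m gives
# plate_area = 10 * 0.012 = 0.12 m2 and
# stiff_area = (0.3*0.012 + 0.1*0.016) * (10 // 0.7) = 0.0052 * 14 = 0.0728 m2.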
def calc_weight(x, prt = False):
'''
Calculating the current weight
:param current_dict:
:return:
'''
span = x[6]
plate_area, stiff_area = get_field_tot_area(x)
if prt:
print('x is', x, 'plate area', plate_area, 'stiff area', stiff_area, 'weight',
span * 7850 * (plate_area + stiff_area))
return span * 7850 * (plate_area + stiff_area)
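# Example for calc_weight (made-up numbers): a field with plate_area + stiff_area = 0.19 m2 and
# span x[6] = 3.5 m weighs 3.5 * 7850 * 0.19 = 5220.25 kg, using 7850 kg/m3 as the steel density.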
def calc_weight_pso(x,*args):
'''
Calculating the current weight
:param current_dict:
:return:
'''
width = args[5]
span = args[6]
plate_area = width*x[1]
stiff_area = (x[2] * x[3]+ x[4] * x[5]) * (width//x[0])
return span * 7850 * (plate_area + stiff_area)
def calc_weight_pso_section(x,*args):
'''
Calculating the weight of a complete section.
:param x:
:param args:
:return:
'''
stru_objects = args[1]
tot_length = args[2]
frame_height = args[3]
frame_section_area = args[4]
tot_weight = 0
for dummy_i in range(len(stru_objects)):
tot_weight += frame_section_area*frame_height*7850
count = 0
for stru_object in stru_objects:
span = tot_length*x[count]
stru_object.Plate.set_span(span)
stru_object.Stiffener.set_span(span)
        tot_weight += stru_object.Stiffener.get_weight_width_lg()
        count += 1
return tot_weight
def calc_weight_cylinder(x):
'''
    Calculation of total weight.
shell (0.02, 2.5, 5, 5, 10, nan, nan, nan),
long (0.875, nan, 0.3, 0.01, 0.1, 0.01, nan, nan),
ring (nan, nan, 0.3, 0.01, 0.1, 0.01, nan, nan),
ring (nan, nan, 0.7, 0.02, 0.2, 0.02, nan, nan)]
'''
if sum(x[1][0:8]) != 0:
num_long_stf = 2*math.pi*x[0][1]/x[1][0]
long_stf_area = x[1][2]*x[1][3]+x[1][4]*x[1][5]
long_stf_volume = long_stf_area * x[0][4] * num_long_stf
else:
long_stf_volume = 0
if sum(x[2][0:8]) != 0:
num_ring_stf = x[0][4] / x[0][2]
ring_stf_volume = math.pi*(math.pow(x[0][1],2)-math.pow(x[0][1]-x[2][2],2))*x[2][3] + \
2*math.pi*(x[0][1]-x[2][2]) * x[2][4] * x[2][5]
ring_stf_tot_vol = ring_stf_volume * num_ring_stf
else:
ring_stf_tot_vol = 0
if sum(x[3][0:8]) != 0:
num_ring_girder = x[0][4] / x[0][3]
ring_frame_volume = math.pi*(math.pow(x[0][1],2)-math.pow(x[0][1]-x[3][2],2))*x[3][3] + \
2*math.pi*(x[0][1]-x[3][2])*x[3][4]*x[3][5]
tot_ring_frame_vol = ring_frame_volume*num_ring_girder
else:
tot_ring_frame_vol = 0
shell_volume = 2 * math.pi * x[0][1] * x[0][0] * x[0][4]
return (long_stf_volume+ring_stf_tot_vol+tot_ring_frame_vol+shell_volume)*7850
def stress_scaling_cylinder(x1, x2, stress1):
'''
Scale stresses of a stiffened cylinder.
To scale:
Design axial stress, sa,sd =
Design bending stress, sm,sd =
Design torsional stress, tT,sd=
Design shear stress, tQ,sd=
Additional hoop stress, sh,sd =
'''
t1, r1, s1, hw1, tw1, b1, tf1 = x1
t2, r2, s2, hw2, tw2, b2, tf2 = x2
sasd1, smsd1, tTsd1, tQsd1, shsd1 = stress1
A1 = hw1 * tw1 + b1 * tf1
A2 = hw2 * tw2 + b2 * tf2
# Axial stress changes by equivalent thickness
    thk_eq1 = t1 + (0 if s1 == 0 else A1 / s1)
    thk_eq2 = t2 + (0 if s2 == 0 else A2 / s2)
# Moment stress changes by difference in moment of inertia
Itot1 = calc.CylinderAndCurvedPlate.get_Itot(hw=hw1, tw=tw1, b=b1, tf=tf1, r=r1, s=s1, t=t1)
Itot2 = calc.CylinderAndCurvedPlate.get_Itot(hw=hw2, tw=tw2, b=b2, tf=tf2, r=r2, s=s2, t=t2)
# Torsional, shear and hoop changes by cylinder thickness.
return sasd1*(thk_eq1/thk_eq2), smsd1*(Itot1/Itot2), tTsd1*(t1/t2), tQsd1*(t1/t2), shsd1*(t1/t2)
def stress_scaling(sigma_old,t_old,t_new, fdwn = 1, fup = 0.5):
if t_new <= t_old: #decreasing the thickness
sigma_new = sigma_old*(t_old/(t_old-fdwn*abs((t_old-t_new))))
# assert sigma_new >= sigma_old, 'ERROR no stress increase: \n' \
# 't_old '+str(t_old)+' sigma_old '+str(sigma_old)+ \
# '\nt_new '+str(t_new)+' sigma_new '+str(sigma_new)
else: #increasing the thickness
sigma_new = sigma_old*(t_old/(t_old+fup*abs((t_old-t_new))))
# assert sigma_new <= sigma_old, 'ERROR no stress reduction: \n' \
# 't_old '+str(t_old)+' sigma_old '+str(sigma_old)+ \
# '\nt_new '+str(t_new)+' sigma_new '+str(sigma_new)
return sigma_new
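# Example for stress_scaling (assumed values): reducing the thickness from t_old = 0.020 m to
# t_new = 0.015 m with fdwn = 1 gives sigma_new = sigma_old * 0.020 / (0.020 - 0.005), i.e. roughly
# a 33 % stress increase; when the thickness is increased, the damped factor fup is used instead.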
def stress_scaling_area(sigma_old,a_old,a_new, fdwn = 1, fup = 0.5):
''' Scale stresses using input area '''
if a_new <= a_old: #decreasing the thickness
sigma_new = sigma_old*(a_old/(a_old-fdwn*abs((a_old-a_new))))
# assert sigma_new >= sigma_old, 'ERROR no stress increase: \n' \
# 't_old '+str(a_old)+' sigma_old '+str(sigma_old)+ \
# '\nt_new '+str(a_new)+' sigma_new '+str(sigma_new)
else: #increasing the thickness
sigma_new = sigma_old*(a_old/(a_old+fup*abs((a_old-a_new))))
# assert sigma_new <= sigma_old, 'ERROR no stress reduction: \n' \
# 't_old '+str(a_old)+' sigma_old '+str(sigma_old)+ \
# '\nt_new '+str(a_new)+' sigma_new '+str(sigma_new)
#print('a_old', a_old, 'sigma_old', sigma_old, '|', 'a_new', a_new, 'sigma_new',sigma_new)
return sigma_new
def x_to_string(x):
ret = ''
for val in x:
ret += str(val) + '_'
return ret
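# Example: x_to_string((0.7, 0.012, 0.3)) returns '0.7_0.012_0.3_', which is used as a dictionary
# key to map a design candidate to its PULS/ML result.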
def get_filtered_results(iterable_all,init_stuc_obj,lat_press,init_filter_weight,side='p',
chk=(True,True,True,True,True,True,True, False),fat_dict = None, fat_press = None,
slamming_press=None, processes = None, puls_sheet = None, puls_acceptance = 0.87,
fdwn = 1, fup = 0.5, ml_algo = None):
'''
    Using multiprocessing to return a list of applicable results.
:param iterable_all:
:param init_stuc_obj:
:param lat_press:
:param init_filter_weight:
:param side:
:param chk:
:return:
'''
#print('Init filter weight', init_filter_weight)
'''
x,obj,lat_press,init_weight,side='p',chk=(True,True,True,True, True, True, True, False),
fat_dict = None, fat_press = None, slamming_press = 0, , puls_results = None, print_result = False
'''
if chk[7]:
# PULS to be used.
#calc.PULSpanel
'''
dict_to_run[line_given] = self._line_to_struc[line_given][1].get_puls_input()
dict_to_run[line_given]['Identification'] = line_given
dict_to_run[line_given]['Pressure (fixed)'] = self.get_highest_pressure(line_given)['normal'] / 1e6
'''
dict_to_run = {}
for x in iterable_all:
x_id = x_to_string(x)
# calc_object = create_new_calc_obj(init_stuc_obj, x, fat_dict, fdwn = fdwn, fup = fup)
calc_object_stf = None if init_stuc_obj.Stiffener is None else create_new_calc_obj(init_stuc_obj.Stiffener,
x, fat_dict,
fdwn=fdwn, fup=fup)
calc_object_pl = create_new_calc_obj(init_stuc_obj.Plate, x, fat_dict, fdwn=fdwn, fup=fup)
calc_object = [calc.AllStructure(Plate=calc_object_pl[0],
Stiffener=None if init_stuc_obj.Stiffener is None else calc_object_stf[0],
Girder=None,
main_dict=init_stuc_obj.get_main_properties()['main dict']),
calc_object_pl[1]]
dict_to_run[x_id] = calc_object[0].Plate.get_puls_input()
dict_to_run[x_id]['Identification'] = x_id
dict_to_run[x_id]['Pressure (fixed)'] = lat_press # PULS sheet to have pressure in MPa
PULSrun = calc.PULSpanel(dict_to_run, puls_sheet_location=puls_sheet, puls_acceptance=puls_acceptance)
PULSrun.run_all()
sort_again = None
elif chk[8]:
# ML-CL to be used.
# Buckling ml-cl
sp_int, sp_gl_gt, up_int, up_gl_gt, \
sp_int_idx, sp_gl_gt_idx, up_int_idx, up_gl_gt_idx = \
list(), list(), list(),list(),list(), list(), list(),list()
# Create iterator
idx_count = 0
for idx, x in enumerate(iterable_all):
idx_count += 1
# calc_object = create_new_calc_obj(init_stuc_obj, x, fat_dict, fdwn=fdwn, fup=fup)
calc_object_stf = None if init_stuc_obj.Stiffener is None else create_new_calc_obj(init_stuc_obj.Stiffener,
x, fat_dict,
fdwn=fdwn, fup=fup)
calc_object_pl = create_new_calc_obj(init_stuc_obj.Plate, x, fat_dict, fdwn=fdwn, fup=fup)
calc_object = [calc.AllStructure(Plate=calc_object_pl[0],
Stiffener=None if init_stuc_obj.Stiffener is None else calc_object_stf[0],
Girder=None,
main_dict=init_stuc_obj.get_main_properties()['main dict']), calc_object_pl[1]]
if calc_object[0].Plate.get_puls_sp_or_up() == 'UP':
if calc_object[0].Plate.get_puls_boundary() == 'Int':
up_int.append(calc_object[0].Plate.get_buckling_ml_input(lat_press, alone = False))
up_int_idx.append(idx)
else:
up_gl_gt.append(calc_object[0].Plate.get_buckling_ml_input(lat_press, alone = False))
up_gl_gt_idx.append(idx)
else:
if calc_object[0].Plate.get_puls_boundary() == 'Int':
sp_int.append(calc_object[0].Plate.get_buckling_ml_input(lat_press, alone = False))
sp_int_idx.append(idx)
else:
sp_gl_gt.append(calc_object[0].Plate.get_buckling_ml_input(lat_press, alone = False))
sp_gl_gt_idx.append(idx)
# Predict
sort_again = np.zeros([len(iterable_all),2])
if len(sp_int) != 0:
sp_int_res = [ml_algo['cl SP buc int predictor'].predict(ml_algo['cl SP buc int scaler']
.transform(sp_int)),
ml_algo['cl SP ult int predictor'].predict(ml_algo['cl SP buc int scaler']
.transform(sp_int))]
for idx, res_buc, res_ult in zip(sp_int_idx, sp_int_res[0],sp_int_res[1]):
sort_again[idx] = [res_buc, res_ult]
if len(sp_gl_gt) != 0:
sp_gl_gt_res = [ml_algo['cl SP buc GLGT predictor'].predict(ml_algo['cl SP buc GLGT scaler']
.transform(sp_gl_gt)),
ml_algo['cl SP buc GLGT predictor'].predict(ml_algo['cl SP buc GLGT scaler']
.transform(sp_gl_gt))]
for idx, res_buc, res_ult in zip(sp_gl_gt_idx, sp_gl_gt_res[0],sp_gl_gt_res[1]):
sort_again[idx] = [res_buc, res_ult]
if len(up_int) != 0:
up_int_res = [ml_algo['cl UP buc int predictor'].predict(ml_algo['cl UP buc int scaler']
.transform(up_int)),
ml_algo['cl UP ult int predictor'].predict(ml_algo['cl UP buc int scaler']
.transform(up_int))]
for idx, res_buc, res_ult in zip(up_int_idx, up_int_res[0],up_int_res[1]):
sort_again[idx] = [res_buc, res_ult]
if len(up_gl_gt) != 0:
up_gl_gt_res =[ml_algo['cl UP buc GLGT predictor'].predict(ml_algo['cl UP buc GLGT scaler']
.transform(up_gl_gt)),
ml_algo['cl UP buc GLGT predictor'].predict(ml_algo['cl UP buc GLGT scaler']
.transform(up_gl_gt))]
for idx, res_buc, res_ult in zip(up_gl_gt_idx, up_gl_gt_res[0],up_gl_gt_res[1]):
sort_again[idx] = [res_buc, res_ult]
PULSrun = None
else:
PULSrun = None
idx_count = 0
for x in iterable_all:
idx_count += 1
sort_again = None
iter_var = list()
for idx,item in enumerate(iterable_all):
iter_var.append((item,init_stuc_obj,lat_press,init_filter_weight,side,chk,fat_dict,fat_press,slamming_press,
PULSrun, False,fdwn, fup, sort_again[idx] if chk[8] == True else None))
iter_var = tuple(iter_var)
#res_pre = it.starmap(any_constraints_all, iter_var)
if processes is None:
processes = max(cpu_count()-1,1)
with Pool(processes) as my_process:
# res_pre = m
# y_process.starmap_async(any_constraints_all, iter_var).get()
# print('Done calculating')
res_pre = my_process.starmap(any_constraints_all, iter_var)
check_ok, check_not_ok = list(), list()
for item in res_pre:
if item[0] is False:
check_not_ok.append(item)
else:
check_ok.append(item)
return check_ok, check_not_ok
def any_get_all_combs(min_var, max_var,deltas, init_weight = float('inf'), predef_stiffeners = None, stf_type = None):
'''
    Calculating initial values.
:param min:
:param max:
:return:
'''
'''
shell_upper_bounds = np.array( [0.03, 2.5, 5, 0.8, 6, 6])
shell_deltas = np.array( [0.01, 2.5, 1, 0.1, 1, 1])
shell_lower_bounds = np.array( [0.02, 2.5, 5, 0.6, 4, 4])
long_upper_bounds = np.array( [0.875, None, 0.5, 0.018, 0.2, 0.03])
long_deltas = np.array( [0.025, None, 0.1, 0.004, 0.05, 0.005])
long_lower_bounds = np.array( [0.875, None, 0.3, 0.010, 0.1, 0.010])
ring_stf_upper_bounds = np.array( [None, None, 0.5, 0.018, 0.2, 0.03])
ring_stf_deltas = np.array( [None, None, 0.1, 0.004, 0.05, 0.005])
ring_stf_lower_bounds = np.array( [None, None, 0.3, 0.010, 0.1, 0.010])
ring_frame_upper_bounds = np.array( [None, None, 0.9, 0.04, 0.3, 0.04])
ring_frame_deltas = np.array( [None, None, 0.2, 0.01, 0.1, 0.01])
ring_frame_lower_bounds = np.array( [None, None, 0.5, 0.02, 0.2, 0.020])
'''
if min_var[0] is not None:
spacing_array = (np.arange(min_var[0], max_var[0]+ deltas[0], deltas[0])) if min_var[0] != max_var[0] \
else np.array([min_var[0]])
spacing_array = spacing_array[spacing_array <= max_var[0]]
else:
spacing_array = np.array([np.nan])
if min_var[1] is not None:
pl_thk_array = (np.arange(min_var[1], max_var[1]+ deltas[1], deltas[1])) if min_var[1] != max_var[1] \
else np.array([min_var[1]])
pl_thk_array = pl_thk_array[pl_thk_array <= max_var[1]]
else:
pl_thk_array = np.array([np.nan])
if predef_stiffeners is not None:
predef_iterable = list()
for pre_str in predef_stiffeners:
for spacing in spacing_array:
for pl_thk in pl_thk_array:
new_field = list(pre_str)
new_field[0] = spacing
new_field[1] = pl_thk
predef_iterable.append(tuple(new_field))
return predef_iterable
web_h_array = (np.arange(min_var[2], max_var[2]+ deltas[2], deltas[2])) if min_var[2] != max_var[2] \
else np.array([min_var[2]])
web_h_array = web_h_array[web_h_array <= max_var[2]]
web_thk_array = (np.arange(min_var[3], max_var[3]+ deltas[3], deltas[3])) if min_var[3] != max_var[3] \
else np.array([min_var[3]])
web_thk_array = web_thk_array[web_thk_array <= max_var[3]]
flange_w_array = (np.arange(min_var[4], max_var[4]+ deltas[4], deltas[4])) if min_var[4] != max_var[4] \
else np.array([min_var[4]])
flange_w_array = flange_w_array[flange_w_array <= max_var[4]]
if min_var[5] is not None:
flange_thk_array = (np.arange(min_var[5], max_var[5]+ deltas[5], deltas[5])) if min_var[5] != max_var[5] \
else np.array([min_var[5]])
flange_thk_array = flange_thk_array[flange_thk_array <= max_var[5]]
else:
flange_thk_array = np.array([np.nan])
if min_var[6] is not None:
span_array = (np.arange(min_var[6], max_var[6], deltas[4])) if min_var[6] != max_var[6] \
else np.array([min_var[6]])
else:
span_array = np.array([np.nan])
if min_var[7] is not None:
girder_array = (np.arange(min_var[7], max_var[7], deltas[7])) if min_var[7] != max_var[7] \
else np.array([min_var[7]])
else:
girder_array = np.array([np.nan])
comb = it.product(spacing_array, pl_thk_array, web_h_array, web_thk_array, flange_w_array, flange_thk_array,
span_array,girder_array)
return list(comb)
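# Example of the combination count (illustrative bounds): 3 spacings, 4 plate thicknesses,
# 5 web heights, 3 web thicknesses, 4 flange widths and 3 flange thicknesses with fixed span and
# girder width give 3*4*5*3*4*3 = 2160 candidates for the constraint filtering.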
def get_initial_weight(obj,lat_press,min_var,max_var,deltas,trials,fat_dict,fat_press, predefined_stiffener_iter,
slamming_press, fdwn = 1, fup = 0.5, ml_algo = None):
'''
Return a guess of the initial weight used to filter the constraints.
Only aim is to reduce running time of the algorithm.
'''
min_weight = float('inf')
if predefined_stiffener_iter is None:
combs = any_get_all_combs(min_var, max_var, deltas)
else:
combs = any_get_all_combs(min_var, max_var, deltas,predef_stiffeners=[item.get_tuple() for item in
predefined_stiffener_iter])
trial_selection = random_product(combs, repeat=trials)
obj.lat_press = lat_press
for x in trial_selection:
if any_constraints_all(x=x,obj=obj,lat_press=lat_press,init_weight=min_weight,
fat_dict=fat_dict,fat_press = fat_press,slamming_press=slamming_press,
fdwn = fdwn, fup = fup)[0]:
current_weight = calc_weight(x)
if current_weight < min_weight:
min_weight = current_weight
return min_weight
def get_random_result(obj,lat_press,min_var,max_var,deltas,trials=10000,side='p',const_chk=(True,True,True,True,True),
fat_dict=None, fat_press=None):
'''
Return random results
'''
min_weight = float('inf')
ass_var = None
combs = any_get_all_combs(min_var, max_var, deltas)
trial_selection = random_product(combs,repeat=trials)
for x in trial_selection:
if any_constraints_all(x=x,obj=obj,lat_press=lat_press,init_weight=min_weight,side=side,chk=const_chk,
fat_dict = fat_dict, fat_press = fat_press)[0] is not False:
current_weight = calc_weight(x)
if current_weight < min_weight:
min_weight = current_weight
ass_var = x
    if ass_var is None:
return ass_var
return create_new_structure_obj(obj, [round(item, 5) for item in ass_var]), \
create_new_calc_obj(obj, [round(item, 5) for item in ass_var])[0]
def get_random_result_no_bounds(obj,lat_press,min_var,max_var,trials=10000,side='p',const_chk=(True,True,True,True,True)
, fat_dict=None, fat_press=None):
'''
Return random results, ignoring the deltas
'''
min_weight = float('inf')
ass_var = None
for trial in range(trials):
spacing = random.randrange(int(min_var[0]*1000),int(max_var[0]*1000),1)/1000
pl_thk = random.randrange(int(min_var[1]*1000),int(max_var[1]*1000),1)/1000
web_h = random.randrange(int(min_var[2]*1000),int(max_var[2]*1000),1)/1000
web_thk = random.randrange(int(min_var[3]*1000),int(max_var[3]*1000),1)/1000
fl_w = random.randrange(int(min_var[4]*1000),int(max_var[4]*1000),1)/1000
fl_thk = random.randrange(int(min_var[5]*1000),int(max_var[5]*1000),1)/1000
x = (spacing,pl_thk,web_h,web_thk,fl_w,fl_thk,min_var[6],min_var[7])
if any_constraints_all(x=x,obj=obj,lat_press=lat_press,init_weight=min_weight,side=side,chk=const_chk,
fat_dict = fat_dict, fat_press = fat_press)[0]:
current_weight = calc_weight(x)
if current_weight < min_weight:
min_weight = current_weight
ass_var = x
    if ass_var is None:
return ass_var
return create_new_structure_obj(obj, [round(item, 5) for item in ass_var]), \
create_new_calc_obj(obj, [round(item, 5) for item in ass_var])[0]
def random_product(*args, repeat=1):
"Random selection from itertools.product(*args, **kwds)"
pools = [tuple(pool) for pool in args] * repeat
return tuple(random.choice(pool) for pool in pools)
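# Example: random_product([1, 2], ['a', 'b'], repeat=2) returns a single random 4-tuple such as
# (2, 'a', 1, 'b'), i.e. one draw from itertools.product without building the full product.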
def product_any(*args, repeat=1,weight=float('inf')):
# product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
# product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
pools = [tuple(pool) for pool in args] * repeat
result = [[]]
for pool in pools:
result = [x+[y] for x in result for y in pool]
for prod in result:
if calc_weight(prod) < weight:
yield tuple(prod)
def plot_optimization_results(results, multiple = False):
check_ok_array, check_array, section_array = list(), list(), list()
save_to_csv = asksaveasfilename()
if save_to_csv != '':
csv_file = open(save_to_csv,'w', newline='')
csv_writer = csv.writer(csv_file)
csv_writer.writerow(['Is OK', 'Check info', 'pl b', 'pl thk', 'web h', 'web thk', 'fl b', 'fl thk', 'span',
'girder width', 'stiffener type', 'uf weight', 'uf sec mod', 'uf loc stf buc',
'uf buckling', 'uf min pl', 'uf shear', 'uf fatigue', 'uf slamming'])
for check_ok, check, section, ufres in results[4]:
check_ok_array.append(check_ok)
check_array.append(check)
section_array.append(section)
if save_to_csv != '':
to_write = list()
to_write.append(check_ok)
to_write.append(check)
[to_write.append(item) for item in section]
            if len(section) == 8:
to_write.append('T')
[to_write.append(item) for item in ufres]
csv_writer.writerow(to_write)
if save_to_csv != '':
csv_file.close()
check_ok_array, check_array, section_array = np.array(check_ok_array), \
np.array(check_array), \
np.array(section_array)
x_label = np.unique(check_array)
y = [np.count_nonzero(check_array == item) for item in np.unique(check_array)]
fig, axs = plt.subplots(2, 1)
clust_data = np.append(np.array(x_label).reshape(len(x_label), 1), np.array(y).reshape(len(y), 1), axis=1)
    collabel = ('Check fail type or OK', 'Number of occurrences')
axs[0].axis('tight')
axs[0].axis('off')
the_table = axs[0].table(cellText=clust_data, colLabels=collabel, loc='center')
axs[1].pie(y, labels=x_label, autopct='%1.1f%%', explode=[0.1 for dummy in range(len(x_label))])
plt.show()
if __name__ == '__main__':
import example_data as ex
from calc_structure import CylinderAndCurvedPlate, Structure, Shell
shell_main_dict = ex.shell_main_dict
shell_main_dict['geometry'] = [7, '']
#Structure(ex.obj_dict_cyl_ring)
#Structure(ex.obj_dict_cyl_heavy_ring)
# my_cyl = CylinderAndCurvedPlate(main_dict = ex.shell_main_dict, shell= Shell(ex.shell_dict),
# long_stf= Structure(ex.obj_dict_cyl_long2),
# ring_stf = Structure(ex.obj_dict_cyl_ring2),
# ring_frame= Structure(ex.obj_dict_cyl_heavy_ring2))
my_cyl = CylinderAndCurvedPlate(main_dict = ex.shell_main_dict, shell= Shell(ex.shell_dict),
long_stf= Structure(ex.obj_dict_cyl_long2),
ring_stf = None,# Structure(ex.obj_dict_cyl_ring2),
ring_frame= None)#Structure(ex.obj_dict_cyl_heavy_ring2))
shell_upper_bounds = np.array( [0.03, 5, 5, 5, 10, None, None, None])
shell_deltas = np.array( [0.005, 0.5, 1, 0.1,1, None, None, None])
shell_lower_bounds = np.array( [0.02, 5, 5, 5, 10, None, None, None])
long_upper_bounds = np.array( [0.8, None, 0.5, 0.02, 0.2, 0.03, None, None])
long_deltas = np.array( [0.1, None, 0.1, 0.01, 0.1, 0.01, None, None])
long_lower_bounds = np.array( [0.7, None, 0.3, 0.01, 0.1, 0.01, None, None])
ring_stf_upper_bounds = np.array( [None, None, 0.5, 0.018, 0.2, 0.03, None, None])
ring_stf_deltas = np.array( [None, None, 0.1, 0.004, 0.1, 0.01, None, None])
ring_stf_lower_bounds = np.array( [None, None, 0.3, 0.010, 0.1, 0.010, None, None])
ring_frame_upper_bounds = np.array( [None, None, 0.9, 0.04, 0.3, 0.04, None, None])
ring_frame_deltas = np.array( [None, None, 0.2, 0.01, 0.1, 0.01, None, None])
ring_frame_lower_bounds = np.array( [None, None, 0.7, 0.02, 0.2, 0.02, None, None])
max_var = [shell_upper_bounds, long_upper_bounds, ring_stf_upper_bounds, ring_frame_upper_bounds]
deltas = [shell_deltas, long_deltas, ring_stf_deltas, ring_frame_deltas]
min_var = [shell_lower_bounds, long_lower_bounds, ring_stf_lower_bounds, ring_frame_lower_bounds]
results = run_optmizataion(initial_structure_obj=my_cyl, min_var=min_var, max_var=max_var, deltas=deltas,
cylinder=True, use_weight_filter=True)
shell = ['Shell thk. [mm]', 'Shell radius [mm]' , 'l rings [mm]', 'L shell [mm]', 'L tot. [mm]', 'N/A - future', 'N/A - future', 'N/A - future']
stf_long = ['Spacing [mm]', 'Plate thk. [mm]', 'Web height [mm]', 'Web thk. [mm]', 'Flange width [mm]', 'Flange thk. [mm]', 'N/A - future', 'N/A - future']
stf_ring = ['N/A', 'Plate thk. [mm]', 'Web height [mm]', 'Web thk. [mm]', 'Flange width [mm]', 'Flange thk. [mm]', 'N/A - future', 'N/A - future']
# obj_dict = ex.obj_dict_sec_error
# fat_obj = ex.get_fatigue_object_problematic()
# fp = ex.get_fatigue_pressures_problematic()
# fat_press = ((fp['p_ext']['loaded'],fp['p_ext']['ballast'],fp['p_ext']['part']),
# (fp['p_int']['loaded'],fp['p_int']['ballast'],fp['p_int']['part']))
# x0 = [obj_dict['spacing'][0], obj_dict['plate_thk'][0], obj_dict['stf_web_height'][0], obj_dict['stf_web_thk'][0],
# obj_dict['stf_flange_width'][0], obj_dict['stf_flange_thk'][0], obj_dict['span'][0], 10]
#
# obj = calc.Structure(obj_dict)
# lat_press = 427.235
# calc_object = calc.CalcScantlings(obj_dict)
# lower_bounds = np.array([0.875, 0.012, 0.3, 0.012, 0.1, 0.012, 3.5, 10])
# upper_bounds = np.array([0.875, 0.025, 0.5, 0.018, 0.2, 0.03, 3.5, 10])
# deltas = np.array([0.025, 0.001, 0.01, 0.001, 0.01, 0.001])
#
#
# t1 = time.time()
# #
# results = run_optmizataion(obj, lower_bounds,upper_bounds, lat_press, deltas, algorithm='anysmart',
# fatigue_obj=fat_obj, fat_press_ext_int=fat_press, use_weight_filter=True)
#
# print(results[1])
# print(results[1].get_dnv_min_section_modulus(lat_press))
# print(min([round(results[1].get_section_modulus()[0], 5), round(results[1].get_section_modulus()[1], 5)]))
# t1 = time.time()
# check_ok_array, check_array, section_array = list(), list(), list()
#
# for check_ok, check, section in results[4]:
# check_ok_array.append(check_ok)
# check_array.append(check)
# section_array.append(section)
# check_ok_array, check_array, section_array = np.array(check_ok_array),\
# np.array(check_array),\
# np.array(section_array)
#
# x_label = np.unique(check_array)
# y = [np.count_nonzero(check_array == item) for item in np.unique(check_array)]
#
# fig, axs = plt.subplots(2, 1)
# clust_data = np.append(np.array(x_label).reshape(len(x_label),1), np.array(y).reshape(len(y),1), axis=1)
# collabel = ('Check fail type or OK', 'Number of occurences')
# axs[0].axis('tight')
# axs[0].axis('off')
# the_table = axs[0].table(cellText=clust_data, colLabels=collabel, loc='center')
# axs[1].pie(y, labels = x_label, autopct='%1.1f%%', explode=[0.1 for dummy in range(len(x_label))])
# plt.show()
#
# cmap = plt.cm.get_cmap(plt.cm.viridis, len(x_label))
#
# Create data
# N = 60
# x = section_array[:,0] * section_array[:,1]
# y = section_array[:,2] * section_array[:,3]
# z = section_array[:,4] * section_array[:,5]
#
# #data = (g1, g2, g3)
#
# groups = x_label
# colors = "bgrcmykw"
# color_dict = dict()
# for idx, group in enumerate(groups):
# color_dict[group] = colors[idx]
#
# # Create plot
# fig = plt.figure()
# #ax = fig.add_subplot(1, 1, 1)
# ax = fig.gca(projection='3d')
#
# for xdata, ydata, zdata, group in zip(x, y, z, groups):
# if group == 'Check OK':
# ax.scatter(x, y, z, alpha= 0.6 if group != 'Weight filter' else 0.2,
# c=color_dict[group], edgecolors='none', s=5, label=group)
#
# plt.title('Matplot 3d scatter plot')
# plt.legend(loc=2)
# plt.show()
# for swarm_size in [100, 1000, 10000, 100000, 1000000]:
# t1 = time.time()
#
# pso_options = (swarm_size, 0.5, 0.5, 0.5, 100, 1e-8, 1e-8)
# results = run_optmizataion(obj, upper_bounds, lower_bounds, lat_press, deltas, algorithm='anysmart',
# fatigue_obj=fat_obj, fat_press_ext_int=fat_press, pso_options=pso_options)[0]
# print('Swarm size', swarm_size, 'running time', time.time()-t1, results.get_one_line_string())
# fat_press_ext_int = list()
# for pressure in ex.get_geo_opt_fat_press():
# fat_press_ext_int.append(((pressure['p_ext']['loaded'], pressure['p_ext']['ballast'],
# pressure['p_ext']['part']),
# (pressure['p_int']['loaded'], pressure['p_int']['ballast'],
# pressure['p_int']['part'])))
#
# opt_girder_prop = (0.018, 0.25,0.015, 0,0, 1.1,0.9)
#
# results = run_optmizataion(ex.get_geo_opt_object(), lower_bounds, upper_bounds, ex.get_geo_opt_presure(), deltas,
# is_geometric=True, fatigue_obj=ex.get_geo_opt_fatigue(),
# fat_press_ext_int=fat_press_ext_int,
# slamming_press=ex.get_geo_opt_slamming_none(), load_pre=False,
# opt_girder_prop= opt_girder_prop,
# min_max_span=(1,12), tot_len=12)
# import pickle
# with open('geo_opt_2.pickle', 'rb') as file:
# geo_results = pickle.load(file)
#
# print(geo_results.keys())
# print(geo_results[1][0])
# for val in range(6):
    # plot_optimization_results(geo_results[3][1][val])

# ----- end of file: ANYstructure-4.10/any_files/optimize.py -----
try:
import any_files.pl_stf_window as plstf
import any_files.helper as hlp
except ModuleNotFoundError:
import ANYstructure.any_files.pl_stf_window as plstf
import ANYstructure.any_files.helper as hlp
def point_to_js_command(point_coord, point_name):
'''
Returning a js script.
:param point_coord:
:param point_name:
:return:
'''
return point_name + ' = Point('+str(point_coord[0])+', 0, '+str(point_coord[1]) + ');\n'
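# Example: point_to_js_command((10.0, 20.0), 'point1') returns "point1 = Point(10.0, 0, 20.0);\n";
# the middle coordinate is always written as 0.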
def line_to_js_command_reference(from_point, to_point, curve_name):
'''
Returning a js script based on reference modelling.
:param from_point:
:param to_point:
:param curve_name:
:return:
'''
return curve_name +' = CreateLineTwoPoints(' + 'point'+str(from_point)+','+ ' point' + \
str(to_point) + ');\n'
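# Example: line_to_js_command_reference(1, 2, 'line5') returns
# "line5 = CreateLineTwoPoints(point1, point2);\n", assuming the points were exported with names
# on the form 'point<number>'.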
def section_property_to_js(section: plstf.Section = None):
'''
Sct3 = BarSection(0.25, 0.015);
Sct1 = UnsymISection(0.35, 0.02, 0, 0, 0, 0.15, 0.075, 0.02);
Sct2 = LSection(0.3, 0.012, 0.1, 0.02);
'''
if section.stf_type == 'T':
js_def = 'UnsymISection('+str(section.stf_web_height)+', '+str(section.stf_web_thk)+', 0, 0, 0, '+ \
str(section.stf_flange_width)+', '+str(section.stf_flange_width/2)+', '+\
str(section.stf_flange_thk)+');\n'
elif section.stf_type in ['L', 'L-bulb']:
js_def = 'LSection('+str(section.stf_web_height)+', '+str(section.stf_web_thk)+', '+ \
str(section.stf_flange_width)+', '+ str(section.stf_flange_thk)+');\n'
else:
js_def = 'BarSection('+str(section.stf_web_height)+', '+str(section.stf_web_thk)+');\n'
ret_str = section.__str__() + ' = ' + js_def
    ret_str = ret_str.replace('-', '_')
return ret_str
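# Illustrative output, assuming Section.__str__() returns a sanitized profile name such as
# 'T_400x12_250x14': a T-profile with a 0.4 x 0.012 m web and a 0.25 x 0.014 m flange is exported
# as "T_400x12_250x14 = UnsymISection(0.4, 0.012, 0, 0, 0, 0.25, 0.125, 0.014);".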
class JSfile:
'''
An object representation of the js file.
'''
def __init__(self, points, lines, sections: plstf.Section = None, line_to_struc = None):
super(JSfile, self).__init__()
self._output_lines = list()
self._points = points
self._lines = lines
self._sections = sections
self._line_to_struc = line_to_struc
@property
def output_lines(self):
return self._output_lines
def write_points(self):
''' Writing point in the point list. '''
for point_name, point_coord in self._points.items():
self._output_lines.append(point_to_js_command(point_coord, point_name))
def write_lines(self):
''' Writing the line list. '''
for line_name, point_to_point in self._lines.items():
self._output_lines.append(line_to_js_command_reference(point_to_point[0], point_to_point[1], line_name))
def write_sections(self):
''' Exporting all sections. '''
for section in self._sections:
self._output_lines.append(section_property_to_js(section))
def write_beams(self):
'''
        Making beams and assigning properties.
Bm1 = Beam(line3);
Bm1.section = T_400_0x12_0__250_0x14_0;
'''
for line_name, line_prop in self._line_to_struc.items():
if line_prop[0].Stiffener is not None:
beam_name = 'ANYbm'+str(hlp.get_num(line_name))
self.output_lines.append(beam_name+' = Beam('+line_name+');\n')
section = plstf.Section(line_prop[0].Stiffener.get_structure_prop())
self.output_lines.append(beam_name + '.section = '+section.__str__()+';\n')
if __name__ == '__main__':
import example_data as test
from tkinter import filedialog
imp_file = open('test_js.js', 'w')
JS = JSfile(test.get_point_dict(), test.get_line_dict(), test.get_section_list(),
line_to_struc=test.get_line_to_struc())
JS.write_points()
JS.write_lines()
JS.write_sections()
JS.write_beams()
imp_file.writelines(JS.output_lines)
    imp_file.close()

# ----- end of file: ANYstructure-4.10/any_files/sesam_interface.py -----
import tkinter as tk
import os
from _tkinter import TclError
class CreateLoadFactorWindow:
'''
self._load_factors_dict = {'dnva':[1.3,1.2,0.7], 'dnvb':[1,1,1.3], 'tanktest':[1,1,0]} # DNV loads factors
'''
def __init__(self,master, app=None):
super(CreateLoadFactorWindow, self).__init__()
self._frame = master
self._frame.wm_title("Load factor modifications here.")
self._frame.geometry('800x800')
self._frame.grab_set()
self._app = app
if __name__ == '__main__':
self._load_factors_dict = {'dnva': [1.3, 1.2, 0.7], 'dnvb': [1, 1, 1.3],
'tanktest': [1, 1, 0]} # DNV loads factors
else:
self._load_factors_dict = app._load_factors_dict
self.new_conda_lff1 = tk.DoubleVar()
self.new_conda_lff2 = tk.DoubleVar()
self.new_conda_lff3 = tk.DoubleVar()
self.new_condb_lff1 = tk.DoubleVar()
self.new_condb_lff2 = tk.DoubleVar()
self.new_condb_lff3 = tk.DoubleVar()
self.new_condtt_lff1 = tk.DoubleVar()
self.new_condtt_lff2 = tk.DoubleVar()
self.new_condtt_lff3 = tk.DoubleVar()
self.new_change_default = tk.BooleanVar()
self.new_change_existing = tk.BooleanVar()
self.new_conda_lff1.set(self._load_factors_dict['dnva'][0])
self.new_conda_lff2.set(self._load_factors_dict['dnva'][1])
self.new_conda_lff3.set(self._load_factors_dict['dnva'][2])
self.new_condb_lff1.set(self._load_factors_dict['dnvb'][0])
self.new_condb_lff2.set(self._load_factors_dict['dnvb'][1])
self.new_condb_lff3.set(self._load_factors_dict['dnvb'][2])
self.new_condtt_lff1.set(self._load_factors_dict['tanktest'][0])
self.new_condtt_lff2.set(self._load_factors_dict['tanktest'][1])
self.new_condtt_lff3.set(self._load_factors_dict['tanktest'][2])
ent_w = 20
        tk.Label(self._frame, text='Static and dynamic load factors are specified here', font='Verdana 15 bold')\
.grid(row = 1, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Note that DNV is used as reference, '
'but the load factors can be any other rule set such as ISO.', font='Verdana 8 bold')\
.grid(row = 2, column = 1, sticky = tk.W)
tk.Label(self._frame, text=' ', font='Verdana 8 bold')\
.grid(row = 3, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Condition a) - Static load factor "unknown loads"', font='Verdana 8 bold')\
.grid(row = 4, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Condition a) - Static load factor well defined loads', font='Verdana 8 bold')\
.grid(row = 5, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Condition a) - Dynamic load factor', font='Verdana 8 bold')\
.grid(row = 6, column = 1, sticky = tk.W)
self.ent_conda_lf1 = tk.Entry(self._frame, textvariable=self.new_conda_lff1, width=ent_w)
self.ent_conda_lf2 = tk.Entry(self._frame, textvariable=self.new_conda_lff2, width=ent_w)
self.ent_conda_lf3 = tk.Entry(self._frame, textvariable=self.new_conda_lff3, width=ent_w)
self.ent_conda_lf1.grid(row=4, column=2)
self.ent_conda_lf2.grid(row=5, column=2)
self.ent_conda_lf3.grid(row=6, column=2)
tk.Label(self._frame, text=' ', font='Verdana 8 bold')\
.grid(row = 7, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Condition b) - Static load factor "unknown loads"', font='Verdana 8 bold')\
.grid(row = 8, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Condition b) - Static load factor well defined loads', font='Verdana 8 bold')\
.grid(row = 9, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Condition b) - Dynamic load factor', font='Verdana 8 bold')\
.grid(row = 10, column = 1, sticky = tk.W)
self.ent_condb_lf1 = tk.Entry(self._frame, textvariable=self.new_condb_lff1, width=ent_w)
self.ent_condb_lf2 = tk.Entry(self._frame, textvariable=self.new_condb_lff2, width=ent_w)
self.ent_condb_lf3 = tk.Entry(self._frame, textvariable=self.new_condb_lff3, width=ent_w)
self.ent_condb_lf1.grid(row=8, column=2)
self.ent_condb_lf2.grid(row=9, column=2)
self.ent_condb_lf3.grid(row=10, column=2)
tk.Label(self._frame, text=' ', font='Verdana 8 bold')\
.grid(row = 11, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Tank test) - Static load factor "unknown loads"', font='Verdana 8 bold')\
.grid(row = 12, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Tank test) - Static load factor well defined loads', font='Verdana 8 bold')\
.grid(row = 13, column = 1, sticky = tk.W)
tk.Label(self._frame, text='Tank test) - Dynamic load factor', font='Verdana 8 bold')\
.grid(row = 14, column = 1, sticky = tk.W)
self.ent_condtt_lf1 = tk.Entry(self._frame, textvariable=self.new_condtt_lff1, width=ent_w)
self.ent_condtt_lf2 = tk.Entry(self._frame, textvariable=self.new_condtt_lff2, width=ent_w)
self.ent_condtt_lf3 = tk.Entry(self._frame, textvariable=self.new_condtt_lff3, width=ent_w)
self.ent_condtt_lf1.grid(row=12, column=2)
self.ent_condtt_lf2.grid(row=13, column=2)
self.ent_condtt_lf3.grid(row=14, column=2)
tk.Label(self._frame, text=' ', font='Verdana 8 bold')\
.grid(row = 15, column = 1, sticky = tk.W)
# tk.Label(self._frame, text='Change all current load factors', font='Verdana 8 bold')\
# .grid(row = 16, column = 1, sticky = tk.W)
# tk.Checkbutton(self._frame, variable=self.new_change_existing)\
# .grid(row=17, column=1, sticky = tk.W)
# tk.Label(self._frame, text='Change default load factors', font='Verdana 8 bold')\
# .grid(row = 18, column = 1, sticky = tk.W)
# tk.Checkbutton(self._frame, variable=self.new_change_default)\
# .grid(row=19, column=1, sticky=tk.W)
#
tk.Label(self._frame, text=' ', font='Verdana 8 bold')\
.grid(row = 16, column = 1, sticky = tk.W)
destroy_and_return = tk.Button(self._frame, text='Return specified load factors and change existing',
command=self.return_load_factors, bg='green', font='Verdana 12', fg='yellow')
destroy_and_return.grid(row = 17, column = 1)
tk.Label(self._frame, text=' ', font='Verdana 8 bold')\
.grid(row = 18, column = 1)
try:
img_file_name = 'img_dnv_load_combinations.gif'
if os.path.isfile('images/' + img_file_name):
file_path ='images/' + img_file_name
else:
file_path = app._root_dir + '/images/' + img_file_name
photo_transverse = tk.PhotoImage(file=file_path)
label_trans = tk.Label(self._frame, image=photo_transverse)
label_trans.image = photo_transverse # keep a reference!
label_trans.grid(row = 19, column = 1, columnspan = 2)
except TclError:
pass
def return_load_factors(self):
'''
self._load_factors_dict = {'dnva':[1.3,1.2,0.7], 'dnvb':[1,1,1.3], 'tanktest':[1,1,0]} # DNV loads factors
:return:
'''
self._load_factors_dict['dnva'] = [self.new_conda_lff1.get(), self.new_conda_lff2.get(),
self.new_conda_lff3.get()]
self._load_factors_dict['dnvb'] = [self.new_condb_lff1.get(), self.new_condb_lff2.get(),
self.new_condb_lff3.get()]
self._load_factors_dict['tanktest'] = [self.new_condtt_lff1.get(), self.new_condtt_lff2.get(),
self.new_condtt_lff3.get()]
if __name__ == '__main__':
self._frame.destroy()
print({'returned lf dict': self._load_factors_dict,
'change exisiting': self.new_change_existing.get(),
'change default': self.new_change_default.get()})
return
self._app.on_close_load_factor_window({'returned lf dict': self._load_factors_dict,
'change exisiting': self.new_change_existing.get(),
'change default': self.new_change_default.get()})
self._frame.destroy()
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateLoadFactorWindow(root,app=None)
    root.mainloop()

# ----- end of file: ANYstructure-4.10/any_files/load_factor_window.py -----
import numpy as np
from xlwings import App, Book
class PulsExcel():
    ''' This class opens a PulsExcel workbook.
Input and output structure data and results.
Running macros.
'''
def __init__(self, path_and_file_to_book: str = None, visible = True):
'''
:param path_and_file_to_book: Path and file name or just filename in base directory.
:param visible: If excel shall open in windows or run in the background.
'''
super(PulsExcel, self).__init__()
self.app = App(visible=visible)
self.book = Book(path_and_file_to_book)
self.names_sp = {'Identification': 1, 'Length of panel': 3, 'Stiffener spacing': 4, 'Plate thickness': 5,
'Number of primary stiffeners': 6, 'Stiffener type (L,T,F)': 7, 'Stiffener boundary': 8,
'Stiff. Height': 9, 'Web thick.': 10,'Flange width': 11, 'Flange thick.': 12, 'Tilt angle': 13,
'Number of sec. stiffeners': 14, 'Modulus of elasticity': 21, "Poisson's ratio": 22,
'Yield stress plate': 23, 'Yield stress stiffener': 24, 'Axial stress': 25, 'Trans. stress 1': 26,
'Trans. stress 2': 27, 'Shear stress': 28, 'Pressure (fixed)': 29, 'In-plane support': 30}
self.names_up = {'Identification': 1, 'Length of plate': 3, 'Width of c': 4, 'Plate thickness': 5,
'Modulus of elasticity': 6, "Poisson's ratio":7, 'Yield stress plate': 8,
'Axial stress 1': 9, 'Axial stress 2': 10,'Trans. stress 1': 11,
'Trans. stress 2': 12, 'Shear stress': 13, 'Pressure (fixed)': 14, 'In-plane support': 15,
'Rot left': 16, 'Rot right': 17, 'Rot upper': 18, 'Rot lower': 19}
def close_book(self, save = False):
        ''' Closing app and book. '''
if save:
self.book.save()
self.book.close()
self.app.kill()
def read_data(self, row_number: int = 1, column_number: int = 1):
''' Read one cell in the sheet
        Rows and columns start at 1 (not 0 as is usual in Python).
'''
return self.book.sheets[1].range((row_number, column_number)).value
def set_cell_value(self, row_number: int = 1, column_number: int = 1, cell_value = None, sheet_num = 1):
'''
Set values of a cell.
:param row_number:
:param column_number:
:param cell_value:
:return:
'''
#print('Row', row_number, 'Col', column_number, 'Cell', cell_value)
self.book.sheets[sheet_num].range((row_number, column_number)).value = cell_value
def set_one_row(self, row_number: int = 20, data_dict: dict = None):
'''
Set one row of values
:param row_number: The row to set.
:param data_dict: Data for one panel.
:return:
'''
for name, col_num in self.names_sp.items():
self.set_cell_value(row_number, col_num, data_dict[name])
def set_multiple_rows(self, start_row:int = 20, data_dict: dict = None):
'''
:param start_row: First row to input.
:param list_of_dicts: The data to be set.
:return:
'''
row_number = start_row
for id, data in data_dict.items():
self.set_one_row(row_number, data)
row_number += 1
def set_multiple_rows_batch(self, data_dict: dict = None):
'''
:param start_row: First row to input.
:param list_of_dicts: The data to be set.
:return:
'''
sp_dict, up_dict = dict(), dict()
for key, val in data_dict.items():
if val['sp or up'] == 'SP':
sp_dict[key] = val
else:
up_dict[key] = val
if len(sp_dict)>0:
for name, col_num in self.names_sp.items():
start_row = 20
self.book.sheets[1].range((start_row, col_num)).options(expand='table', transpose=True).value = \
[val[name] for val in sp_dict.values()]
if len(up_dict) > 0:
for name, col_num in self.names_up.items():
start_row = 21
self.book.sheets[4].range((start_row, col_num)).options(expand='table', transpose=True).value = \
[val[name] for val in up_dict.values()]
return len(sp_dict)>0, len(up_dict)>0
def calculate_panels(self, sp = True, up = False):
''' Calculate the panels in the sheet. '''
if sp:
run_macro_sp = self.app.macro('Sheet1.cmdCalculatePanels_Click')
run_macro_sp()
if up:
run_macro_up = self.app.macro('Sheet3.CalculatePanelsU3')
run_macro_up()
def get_results_one_cell(self, row_number: int = 1, column_number: int = 1):
''' Return the results in one cell of a calculated panel. '''
return self.book.sheets[2].range((row_number, column_number)).value
def get_results_one_row(self, row_number: int = 15):
'''
Return one row.
:param row_number:
:return: dict : [value, unit] e.g. [3000, 'mm']
'''
return_dict = dict()
return_dict['Identification'] = self.get_results_one_cell(row_number, column_number=1)
# print('start')
# for idx in range(3,74):
# if self.get_results_one_cell(11, column_number=idx) != None:
# return_dict[self.get_results_one_cell(11, column_number=idx)] = {}
# print('stop')
current_top_row = ''
for idx in range(3,74):
if self.get_results_one_cell(11, column_number=idx) != None:
current_top_row = self.get_results_one_cell(11, column_number=idx)
return_dict[current_top_row] = {}
return_dict[current_top_row][self.get_results_one_cell(12, column_number=idx)] = \
[self.get_results_one_cell(row_number, column_number=idx),
self.get_results_one_cell(14, column_number=idx)]
return return_dict
def get_all_results_batch(self, sp = True, up = False):
spup = {(True, True) : ['SP', 'UP'], (True, False) : ['SP'], (False, True) : ['UP']}
return_dict = {}
for run in spup[(sp, up)]:
all_ids = self.book.sheets[2 if run == 'SP' else 5].range('A15').expand().value
if type(all_ids) != list:
all_ids = [all_ids]
all_data = np.array(self.book.sheets[2 if run == 'SP' else 5].range('C12').expand().value)
all_top_names = np.array(self.book.sheets[2 if run == 'SP' else 5].range('C11:BU11' if run == 'SP' else 'C11:AS11').value)
all_names = all_data[0]
all_data = all_data[3:]
all_units = np.array(self.book.sheets[2 if run == 'SP' else 5].range('C14:BU14'if run == 'SP' else 'C14:AS14').value)
current_top_row = ''
for data_idx, id in enumerate(all_ids):
return_dict[id] = {}
return_dict[id]['Identification'] = id
same, same_idx = False, 2
for top_name, name, data, unit in zip(all_top_names, all_names, all_data[data_idx], all_units):
if top_name != None:
current_top_row = top_name
return_dict[id][current_top_row] = {}
if name in return_dict[id][current_top_row].keys():
same = True
name = name + ' ' +str(same_idx)
same_idx += 1
elif same == True and name not in return_dict[id][current_top_row].keys():
same = False
same_idx = 2
return_dict[id][current_top_row][name] = [data, unit]
if run == 'UP':
return_dict[id]['Ultimate capacity']['Actual usage Factor'] = \
return_dict[id]['Ultimate capacity']['Usage factor']
return_dict[id]['Ultimate capacity']['Allowable usage factor'] = \
return_dict[id]['Ultimate capacity']['Alowable usage']
return_dict[id]['Buckling strength']['Actual usage Factor'] = \
return_dict[id]['Buckling strength']['Usage factor']
return_dict[id]['Buckling strength']['Allowable usage factor'] = \
return_dict[id]['Buckling strength']['Alowable usage']
return_dict[id]['Ultimate capacity'].pop('Usage factor')
return_dict[id]['Ultimate capacity'].pop('Alowable usage')
return_dict[id]['Buckling strength'].pop('Usage factor')
return_dict[id]['Buckling strength'].pop('Alowable usage')
return return_dict
def get_all_results(self):
'''
Return all results in run.
:return:
'''
return_dict, found_last, row_number = dict(), False, 15
while found_last is False:
this_row = self.get_results_one_row(row_number)
if this_row['Identification'] is not None:
return_dict[row_number] = this_row
row_number += 1
else:
found_last = True
return return_dict
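    # Illustrative shape of the dictionary returned by get_all_results (keys follow the headers in
    # the result sheet, values here are made up):
    # {15: {'Identification': 'line12',
    #       'Buckling strength': {'Actual usage Factor': [0.85, None], ...},
    #       'Ultimate capacity': {'Actual usage Factor': [0.78, None], ...}}}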
if __name__ == '__main__':
ex1 = {'line25': {'Identification': 'line25', 'Length of plate': 3, 'Width of c': 800.0, 'Plate thickness': 20.0,
'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0,
'Axial stress 1': 60.0, 'Axial stress 2': 60.0, 'Trans. stress 1': 0, 'Trans. stress 2': 0,
'Shear stress': 10.0, 'Pressure (fixed)': 0.0, 'In-plane support': 'GL', 'Rot left': 'SS',
'Rot right': 'SS', 'Rot upper': 'SS', 'Rot lower': 'SS', 'sp or up': 'UP'}}
my_puls = PulsExcel(r'C:\Github\ANYstructure\ANYstructure\PULS\PulsExcel_new - Copy.xlsm',
visible=True)
my_puls.set_multiple_rows_batch(ex1)
# my_puls.set_multiple_rows(20, [ex.ex1, ex.ex2, ex.ex3, ex.ex4, ex.ex5, ex.ex6])
# my_puls.calculate_panels()
#my_puls.set_one_row(data_dict=ex.ex1['line1'])
# [print(key, value) for key, value in my_puls.get_all_results().items()]
    # my_puls.close_book()

# ----- end of file: ANYstructure-4.10/any_files/excel_inteface.py -----
import any_files.example_data as test
from any_files.calc_loads import *
from any_files.helper import *
# try:
# import any_files.example_data as test
# from any_files.calc_loads import *
# from any_files.helper import *
# except ModuleNotFoundError:
# import ANYstructure.any_files.example_data as test
# from ANYstructure.any_files.calc_loads import *
# from ANYstructure.any_files.helper import *
import tkinter as tk
from tkinter import messagebox
class CreateLoadWindow():
'''
This class defines the external pressures on the hull (static and dynamic).
'''
@staticmethod
def return_dummy_manual(line):
load = 'manual'
combination = 'manual'
load_comb_dict = {}
load_comb_dict[(combination, line, load)] = [tk.DoubleVar(), tk.DoubleVar(), tk.IntVar()]
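# The three variables mirror how make_load_comb_dict uses them further down:
# index 0 -> static load factor, index 1 -> dynamic load factor,
# index 2 -> on/off flag for including the combination.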
load_comb_dict[(combination, line, load)][0].set(0)
load_comb_dict[(combination, line, load)][1].set(1)
load_comb_dict[(combination, line, load)][2].set(1)
return load_comb_dict
def __init__(self, master,app=None):
super(CreateLoadWindow, self).__init__()
limit_states = ['ULS', 'FLS']
if __name__ == '__main__':
options_cond = ['loaded', 'ballast', 'tanktest', 'part', 'slamming']
self._load_factors_dict = {'dnva': [1.3, 1.2, 0.7], 'dnvb': [1, 1, 1.3], 'tanktest': [1, 1, 0]}
self._load_objects = {}
self._load_comb_dict = {}
self._line_dict = test.get_line_dict()
self._load_count = 0
self._slamming_load_count = 0
self._point_dict = test.get_point_dict()
self._canvas_scale = 20
self._structure_types = {'vertical': ['BBS', 'SIDE_SHELL', 'SSS'],
'horizontal': ['BOTTOM', 'BBT', 'HOPPER', 'MD']}
else:
self.app = app
self._load_factors_dict = app._load_factors_dict
options_cond = app._load_conditions
self._load_objects = app._load_dict
self._load_comb_dict = app._new_load_comb_dict
self._line_dict = app._line_dict
self._load_count = 0
self._point_dict = app._point_dict
self._canvas_scale = app._canvas_scale
self._structure_types = app._structure_types
self._slamming_load_count = 0
self._point_is_active = False
self._active_point = ''
self._active_lines = []
self._add_to_lines = True
self._lines_add_to_load = []
listbox_select = 'extended'
frame_dim = (1500,980)
self._canvas_origo = (50,720-50)
self._canvas_dim = (1000,720)
self._frame = master
self._frame.wm_title("Load properties")
self._frame.geometry(str(frame_dim[0])+'x'+str(frame_dim[1]))
self._frame.grab_set()
self._frame.protocol("WM_DELETE_WINDOW", self.on_closing)
tk.Frame(self._frame, width=5, height=980, bg="black", colormap="new").place(x =450, y = 0)
tk.Frame(self._frame, width=455, height=5, bg="black", colormap="new").place(x = 0, y = 320)
tk.Frame(self._frame, width=5, height=190, bg="black", colormap="new").place(x =1000, y = 0)
tk.Frame(self._frame, width=5, height=190, bg="black", colormap="new").place(x=1250, y=0)
tk.Frame(self._frame, width=1100, height=5, bg="black", colormap="new").place(x = 450, y = 190)
# Main canvas creation
self._main_canvas = tk.Canvas(self._frame,width=self._canvas_dim[0], height=self._canvas_dim[1],
background='azure', relief = 'groove', borderwidth=2)
self._main_canvas.place(relx=0.32,rely=0.25)
self._global_shrink = 1
base_canvas_dim = [1000, 720] # do not modify this, sets the "original" canvas dimensions.
self._canvas_dim = [int(base_canvas_dim[0] *self._global_shrink),
int(base_canvas_dim[1] *self._global_shrink)]
self._canvas_base_origo = [50, base_canvas_dim[1] - 50] # 50 bottom left location of the canvas, (0,0)
self._canvas_draw_origo = list(self._canvas_base_origo)
self._previous_drag_mouse = list(self._canvas_draw_origo)
# --- slider (used to zoom) ----
self._slider = tk.Scale(self._frame,from_=60,to = 1, command = self.slider_used,
background='azure', relief = 'groove', borderwidth=2)
self._slider.set(self._canvas_scale)
self._slider.place(relx=0.32,rely=0.25)
# --- Dynamic load input ---
ent_x = 200
delta_y = 30
options_cond = tuple(options_cond)
load_vert_start = 90
tk.Label(self._frame, text='1. Dynamic loads', font='Verdana 10 bold', fg = 'red')\
.place(x=10, y=load_vert_start - 80)
tk.Label(self._frame, text='Define dynamic loads as a polynomial curve.\n'
'Can be third degree, second degree, linear or constant.\n',
font="Verdana 8 bold",justify = tk.LEFT).place(x=10, y=load_vert_start - 50)
tk.Button(self._frame, text='Create dynamic load', command=self.create_dynamic_load_object,
font='Verdana 9 bold', fg='yellow', bg = 'green' )\
.place(x=270, y=load_vert_start + delta_y *6)
self.close_window= tk.Button(self._frame, text='Press this to: \n'
'Save loads and \n'
'close the load window. ',font="Verdana 9 bold",
command=self.save_and_close, bg = 'green', fg = 'yellow')
self.close_window.place(x=ent_x*6.35, y=load_vert_start-20)
self._new_dynamic_load_name = tk.StringVar()
self._new_dynamic_load_name.set('load' + str(self._load_count))
self._new_load_poly_third = tk.DoubleVar()
self._new_load_poly_second = tk.DoubleVar()
self._new_load_poly_first = tk.DoubleVar()
self._new_load_poly_const = tk.DoubleVar()
self._new_load_manual_pressure = tk.DoubleVar()
self._new_dyn_load_condition = tk.StringVar()
self._new_limit_state = tk.StringVar()
self._new_limit_state.set('ULS')
self._new_slamming_pressure = tk.DoubleVar()
self._new_slamming_pressure_name = tk.StringVar()
self._new_slamming_pressure_name.set('slamming')
self._new_slamming_pl_mult = tk.DoubleVar()
self._new_slamming_stf_mult = tk.DoubleVar()
self._new_slamming_pl_mult.set(1.0)
self._new_slamming_stf_mult.set(1.0)
ent_w = 15
ent_dyn_load_name = tk.Entry(self._frame, textvariable=self._new_dynamic_load_name, width=ent_w*2)
ent_load_poly_third = tk.Entry(self._frame, textvariable=self._new_load_poly_third, width=ent_w)
ent_load_poly_second = tk.Entry(self._frame, textvariable=self._new_load_poly_second, width=ent_w)
ent_load_poly_first = tk.Entry(self._frame, textvariable=self._new_load_poly_first,width=ent_w)
ent_load_poly_constant = tk.Entry(self._frame, textvariable=self._new_load_poly_const,width=ent_w)
ent_load_condition = tk.OptionMenu(self._frame, self._new_dyn_load_condition, *options_cond)
ent_limit_state = tk.OptionMenu(self._frame, self._new_limit_state, *limit_states)
# Slamming pressures
slx, sly = ent_x*5.6, load_vert_start-40
tk.Label(self._frame,text = 'Load name:').place(x = slx-90, y = sly)
ent_slamming_pressure = tk.Entry(self._frame, textvariable=self._new_slamming_pressure, width=ent_w)
ent_slamming_pressure.place(x = slx, y = sly+delta_y)
ent_slamming_pl_mult = tk.Entry(self._frame, textvariable=self._new_slamming_pl_mult, width=7)
ent_slamming_pl_mult.place(x = slx + 50, y = sly + 1.8*delta_y)
ent_slamming_stf_mult = tk.Entry(self._frame, textvariable=self._new_slamming_stf_mult, width=7)
ent_slamming_stf_mult.place(x = slx + 50, y = sly+2.6*delta_y)
tk.Label(self._frame,text='Pressure [Pa]:').place(x=slx - 90, y=sly+delta_y)
tk.Label(self._frame, text='Plate multiplier, Ppl').place(x=slx - 90, y=sly + 1.8*delta_y)
tk.Label(self._frame, text='Stiffener multiplier, Pst:').place(x=slx - 90, y=sly + 2.6*delta_y)
ent_slamming_pressure_name = tk.Entry(self._frame, textvariable=self._new_slamming_pressure_name, width=ent_w)
ent_slamming_pressure_name.place(x=slx, y=sly)
tk.Button(self._frame, text = 'Create slamming load', command = self.create_slamming_load,
font='Verdana 9 bold', fg='yellow', bg = 'green' ) \
.place(x=slx - 80, y=sly + 3.5*delta_y)
ent_dyn_load_name.place(x=ent_x, y=load_vert_start + 0 * delta_y)
ent_load_poly_third.place(x=ent_x, y=load_vert_start + 1 * delta_y)
ent_load_poly_second.place(x=ent_x, y=load_vert_start + 2 * delta_y)
ent_load_poly_first.place(x=ent_x, y=load_vert_start +3 * delta_y)
ent_load_poly_constant.place(x=ent_x, y=load_vert_start + 4 * delta_y)
ent_load_condition.place(x=ent_x - 5, y=load_vert_start + 5 * delta_y - 5)
ent_limit_state.place(x=ent_x - 5, y=load_vert_start + 6 * delta_y - 5)
tk.Label(self._frame, text='Input load name:').place(x=10, y=load_vert_start + 0*delta_y)
tk.Label(self._frame, text='Third degree poly [x^3]').place(x=10, y=load_vert_start+delta_y)
tk.Label(self._frame, text='Second degree poly [x^2]').place(x=10, y=load_vert_start + 2*delta_y)
tk.Label(self._frame, text='First degree poly [x]').place(x=10, y=load_vert_start + 3*delta_y)
tk.Label(self._frame, text='Constant [C]').place(x=10, y=load_vert_start + 4 * delta_y)
tk.Label(self._frame, text='Load condition').place(x=10, y=load_vert_start + 5 * delta_y)
tk.Label(self._frame, text='Limit state').place(x=10, y=load_vert_start + 6 * delta_y)
# --- Static load input ---
horizontal_start = 500
tk.Label(self._frame, text='2. Static loads', font='Verdana 10 bold', fg = 'red') \
.place(x=horizontal_start, y=load_vert_start - 80)
tk.Label(self._frame, text='3. Slamming pressure', font='Verdana 10 bold', fg = 'red') \
.place(x=horizontal_start+520, y=load_vert_start - 80)
tk.Label(self._frame, text = 'Hydrostatic loads defined by draft.',
font="Verdana 8 bold")\
.place(x = horizontal_start,y = load_vert_start-1.5*delta_y)
tk.Label(self._frame, text = 'Define static draft from sea [m]:')\
.place(x = horizontal_start,y = load_vert_start + delta_y)
tk.Label(self._frame, text='Define name of static load:').place(x=horizontal_start,y=load_vert_start)
tk.Label(self._frame, text='Select load condition:').place(x=horizontal_start,y=load_vert_start + delta_y*2)
self._new_static_load_name = tk.StringVar()
self._new_static_draft = tk.DoubleVar()
self._new_static_condition = tk.StringVar()
self._new_static_load_name.set('static'+str(self._load_count))
tk.Entry(self._frame, textvariable = self._new_static_load_name,width=ent_w)\
.place(x = horizontal_start+200, y = load_vert_start,)
tk.Entry(self._frame, textvariable = self._new_static_draft,width=ent_w)\
.place(x = horizontal_start+200, y = load_vert_start + delta_y)
tk.OptionMenu(self._frame, self._new_static_condition, *options_cond)\
.place(x = horizontal_start+200, y = load_vert_start + 2*delta_y)
tk.Button(self._frame, text = 'Create static load', command = self.create_static_load_object,
font='Verdana 9 bold', fg='yellow', bg = 'green' )\
.place(x = horizontal_start + 340, y = load_vert_start )
# --- showing created loads ---
start_y = 340
tk.Label(self._frame, text='3. Created loads are seen below\n'
'(scroll if not all are shown.)\n'
'DOUBLE CLICK a load to see associated lines:',
font="Verdana 10 bold", fg='red').place(x=10, y=start_y)
self._load_obj_box = tk.Listbox(self._frame, height = 15, selectmode = listbox_select, bg='azure',
relief = 'groove', borderwidth=2)
self._load_obj_box.place(x=10, y=start_y + 3 * delta_y)
self._load_obj_box.bind('<<ListboxSelect>>', self.left_click_load_box)
loads_scrollbar = tk.Scrollbar(self._frame)
loads_scrollbar.config(command = self._load_obj_box.yview)
loads_scrollbar.place(x=140, y=start_y + 3 * delta_y)
self._load_obj_box.config(yscrollcommand=loads_scrollbar.set)
tk.Label(self._frame, text = 'Select to see associated lines: ').place(x=10, y=start_y + 2*delta_y)
# --- showing the lines applied to the load above ---
self._load_obj_lines_box = tk.Listbox(self._frame, height = 15, selectmode = listbox_select, bg = 'azure',
relief = 'groove', borderwidth=2)
lines_scrollbar = tk.Scrollbar(self._frame)
lines_scrollbar.config(command = self._load_obj_lines_box.yview)
lines_scrollbar.place(x=330, y=start_y+ 3 * delta_y)
self._load_obj_lines_box.config(yscrollcommand=lines_scrollbar.set)
self._load_obj_lines_box.place(x=200, y=start_y+ 3 * delta_y)
self._load_obj_lines_box.bind('<<ListboxSelect>>', self.left_click_load_box)
tk.Label(self._frame, text = '-->',font="Verdana 8 bold").place(x=160, y= load_vert_start + 15 * delta_y )
# --- dropdown menu to choose load to associate with lines ---
self._load_options = ['']
self._new_assisiate_load = tk.StringVar()
self._ent_assosiate_load = tk.OptionMenu(self._frame, self._new_assisiate_load, *tuple(self._load_options))
self._ent_assosiate_load.place(relx=0.85,rely=0.21)
# --- Button to associate selected lines to load ---
tk.Button(self._frame, text = 'Press to add selected lines to selected load',
command=self.append_line_to_load, fg = 'yellow', bg='green',font='Verdana 9 bold')\
.place(relx=0.32,rely=0.215)
tk.Label(self._frame,text='Select a load in "3." and then choose the lines to apply it to\n '
'(select by clicking lines). Alternatively, define manually ------>')\
.place(relx=0.56,rely=0.205)
# --- delete a created load ---
tk.Button(self._frame, text="Delete selected load",command=self.delete_load,
font='Verdana 9 bold', fg='yellow', bg = 'red' )\
.place(x=10, y=start_y + 12 * delta_y)
# --- updating the imported loads from main window ---
if len(self._load_objects) > 0:
self.import_update()
# --- properties canvas to show variables for load ---
self._canvas_properties = tk.Canvas(self._frame, height=200, width=350,
background='azure', relief = 'groove', borderwidth=2)
self._canvas_properties.place(x= 10, y = load_vert_start + delta_y*22.5)
self.controls()
self.draw_canvas()
def delete_load(self):
self._load_objects.pop(self._load_obj_box.get('active'))
self._load_obj_box.delete('active')
def slider_used(self, event):
'''
Action when slider is activated.
:return:
'''
self._canvas_scale = self._slider.get()
self.draw_canvas()
def draw_canvas(self, load_selected=False):
'''
Making the line canvas
:return:
'''
self._main_canvas.delete('all')
# grid for the canvas
self._main_canvas.create_line(self._canvas_draw_origo[0], 0, self._canvas_draw_origo[0], self._canvas_dim[1],
stipple='gray50')
self._main_canvas.create_line(0, self._canvas_draw_origo[1], self._canvas_dim[0], self._canvas_draw_origo[1],
stipple='gray50')
self._main_canvas.create_text(self._canvas_draw_origo[0] - 30 ,
self._canvas_draw_origo[1] + 20 , text='(0,0)',
font='Text 10')
self._main_canvas.create_text([800 ,50],
text='Mouse left click: select lines to loads\n'
'Mouse right click: clear all selection\n'
'Shift key press: add selected line\n'
'Control key press: remove selected line', font='Verdana 8 bold',
fill='red')
# drawing the line dictionary.
if len(self._line_dict) != 0:
for line, value in self._line_dict.items():
color = 'black'
coord1 = self.get_point_canvas_coord('point' + str(value[0]))
coord2 = self.get_point_canvas_coord('point' + str(value[1]))
vector = [coord2[0] - coord1[0], coord2[1] - coord1[1]]
# drawing a bold line if it is selected
if line in self._active_lines:
self._main_canvas.create_line(coord1, coord2, width=6, fill='orange')
self._main_canvas.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 + 10,
text='Line ' + str(get_num(line)), font='Verdana 10 bold',
fill='red')
else:
self._main_canvas.create_line(coord1, coord2, width=3, fill=color)
self._main_canvas.create_text(coord1[0] - 20 + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 + 10,
text='line' + str(get_num(line)), font="Text 8", fill='black')
def button_2_click_and_drag(self,event):
self._canvas_draw_origo = (self._canvas_draw_origo[0]-(self._previous_drag_mouse[0]-event.x),
self._canvas_draw_origo[1]-(self._previous_drag_mouse[1]-event.y))
self._previous_drag_mouse = (event.x,event.y)
self.draw_canvas()
def mouse_scroll(self,event):
self._canvas_scale += event.delta/50
self._canvas_scale = 0 if self._canvas_scale < 0 else self._canvas_scale
self.draw_canvas()
def get_loads(self):
'''
Returning loads
:return:
'''
return self._load_objects
def make_load_comb_dict(self, line, load):
'''
Making the load comb dict
(comb,line,load) : [DoubleVar(),DoubleVar(), IntVar()] #
# {'dnva':[1.3,1.2,0.7], 'dnvb':[1,1,1.3], 'tanktest':[1,1,1]} # DNV loads factors
:return:
'''
for combination in self._load_factors_dict.keys():
factors =self._load_factors_dict[combination]
if load != 'manual':# and (combination, line, load) not in self._load_comb_dict.keys():
#print(combination, line, load)
self._load_comb_dict[(combination, line, load)] = [tk.DoubleVar(), tk.DoubleVar(), tk.IntVar()]
if combination != 'tanktest':
if self._load_objects[load][0].is_static():
if self._load_objects[load][0].get_limit_state() == 'FLS':
self._load_comb_dict[(combination, line, load)][0].set(0)
elif self._load_objects[load][0].is_tank_test():
self._load_comb_dict[(combination, line, load)][0].set(0)
self._load_comb_dict[(combination, line, load)][2].set(0)
else:
self._load_comb_dict[(combination, line, load)][0].set(factors[0])
self._load_comb_dict[(combination, line, load)][2].set(1)
self._load_comb_dict[(combination, line, load)][1].set(0)
else:
self._load_comb_dict[(combination, line, load)][0].set(0)
if self._load_objects[load][0].get_limit_state() == 'FLS':
self._load_comb_dict[(combination, line, load)][1].set(1)
else:
self._load_comb_dict[(combination, line, load)][1].set(factors[2])
self._load_comb_dict[(combination, line, load)][2].set(1)
else:
if self._load_objects[load][0].is_tank_test():
self._load_comb_dict[(combination, line, load)][0].set(1)
self._load_comb_dict[(combination, line, load)][1].set(0)
self._load_comb_dict[(combination, line, load)][2].set(1)
else:
self._load_comb_dict[(combination, line, load)][0].set(0)
self._load_comb_dict[(combination, line, load)][1].set(0)
self._load_comb_dict[(combination, line, load)][2].set(0)
else:
combination = 'manual'
self._load_comb_dict[(combination, line, load)] = [tk.DoubleVar(), tk.DoubleVar(), tk.IntVar()]
self._load_comb_dict[(combination, line, load)][0].set(0)
self._load_comb_dict[(combination, line, load)][1].set(1)
self._load_comb_dict[(combination, line, load)][2].set(1)
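# Worked example (illustrative, derived from the factors above): with the 'dnva'
# factors [1.3, 1.2, 0.7], a static ULS load on a line is stored as
# (dnva, line, load) -> [1.3, 0, 1]  (static factor, dynamic factor, on/off),
# while a dynamic ULS load on the same line becomes
# (dnva, line, load) -> [0, 0.7, 1].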
def create_dynamic_load_object(self, slamming_load = False):
'''
Creating load object for the selected lines.
'(poly_third = None,poly_second = None, poly_first = None, poly_const = None
, load_condition = None, structure_type = None, man_press = None, static_draft = None)'
:return:
'''
variables = ['poly_third','poly_second', 'poly_first', 'poly_const', 'load_condition',
'man_press', 'static_draft', 'name_of_load', 'limit_state', 'structure_types',
'slamming mult pl', 'slamming mult stf']
existing_load = None
if not slamming_load:
name_of_load = self._new_dynamic_load_name.get()
if name_of_load in self._load_objects.keys():
# Existing load
existing_load = copy.deepcopy(self._load_objects[name_of_load])
self._load_objects.pop(name_of_load)
self._load_obj_box.delete(0,'end')
for load in self._load_objects.keys():
self._load_obj_box.insert('end', load)
values = [self._new_load_poly_third.get(),self._new_load_poly_second.get(),
self._new_load_poly_first.get(),self._new_load_poly_const.get(),
self._new_dyn_load_condition.get(), None, None, name_of_load,
self._new_limit_state.get(), self._structure_types, 1, 1]
else:
name_of_load = self._new_slamming_pressure_name.get()
if name_of_load in self._load_objects.keys():
existing_load = copy.deepcopy(self._load_objects[name_of_load])
self._load_objects.pop(name_of_load)
self._load_obj_box.delete(0, 'end')
for load in self._load_objects.keys():
self._load_obj_box.insert('end', load)
values = [0, 0, 0, self._new_slamming_pressure.get(),
'slamming', None, None, name_of_load,
None, self._structure_types,
self._new_slamming_pl_mult.get(),
self._new_slamming_stf_mult.get()]
count_i = 0
current_load_dict = {}
for item in variables:
current_load_dict[item] = values[count_i]
count_i += 1
self._load_objects[name_of_load] = [Loads(current_load_dict),[] if existing_load is None else existing_load[1]]
self._load_options.append(name_of_load)
self._ent_assosiate_load.destroy()
self._ent_assosiate_load = tk.OptionMenu(self._frame, self._new_assisiate_load, *tuple(self._load_options))
self._ent_assosiate_load.place(relx=0.85,rely=0.21)
self._load_obj_box.insert('end',name_of_load)
if not slamming_load:
self._load_count += 1
self._new_dynamic_load_name.set('load'+str(self._load_count))
else:
self._new_slamming_pressure_name.set('slamming' + str(self._slamming_load_count))
self._slamming_load_count += 1
def create_slamming_load(self):
''' Creates a slamming load object. '''
self.create_dynamic_load_object(slamming_load=True)
def create_static_load_object(self):
'''
Creating static loads.
'(poly_third = None,poly_second = None, poly_first = None, poly_const = None
, load_condition = None, structure_type = None, man_press = None, static_draft = None)'
:return:
'''
variables = ['poly_third','poly_second', 'poly_first', 'poly_const', 'load_condition',
'structure_type', 'man_press', 'static_draft','name_of_load']
name_of_load = self._new_static_load_name.get()
existing_load = None
if name_of_load in self._load_objects.keys():
existing_load = copy.deepcopy(self._load_objects[name_of_load])
self._load_objects.pop(name_of_load)
self._load_obj_box.delete(0,'end')
for load in self._load_objects.keys():
self._load_obj_box.insert('end', load)
values = [None,None,None,None,self._new_static_condition.get(),None,None,
self._new_static_draft.get(), name_of_load]
count_i = 0
current_load_dict = {}
for item in variables:
current_load_dict[item] = values[count_i]
count_i += 1
self._load_objects[name_of_load] = [Loads(current_load_dict),[] if existing_load is None else existing_load[1]]
self._load_options.append(name_of_load)
self._ent_assosiate_load.destroy()
self._ent_assosiate_load = tk.OptionMenu(self._frame, self._new_assisiate_load, *tuple(self._load_options))
self._ent_assosiate_load.place(relx=0.85,rely=0.21)
self._load_obj_box.insert('end', name_of_load)
self._load_count += 1
self._new_static_load_name.set('static' + str(self._load_count))
def append_line_to_load(self):
'''
Specifying lines for the load
:return:
'''
current_load = self._new_assisiate_load.get()
if current_load != '':
self._load_objects[current_load][1] = []
for line in self._active_lines:
#if line not in self._load_objects[current_load][1]:
self._load_objects[current_load][1].append(line)
else:
mess = tk.messagebox.showwarning('Select load',message='Select a load to apply to the selected lines.',
type='ok')
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
for load, data in self._load_objects.items():
for line in data[1]:
self.make_load_comb_dict(line,load)
for line in self.app._line_dict.keys():
self.make_load_comb_dict(line,'manual')
if self._load_objects is not None:
self.app.on_close_load_window(self._load_objects, self._load_count, self._load_comb_dict)
self._frame.destroy()
def on_closing(self):
'''
Action when closing the window without saving.
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
mess = tk.messagebox.showwarning('Closed without saving', 'Closing will not save loads you have created',
type = 'okcancel')
if mess == 'ok':
self._frame.grab_release()
self._frame.destroy()
self.app.on_aborted_load_window()
def get_point_canvas_coord(self, point_no):
'''
Returning the canvas coordinates of the point. This value will change with slider.
'''
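# Example (illustrative): with _canvas_draw_origo = (50, 670) and _canvas_scale = 20,
# the model point (10, 5) maps to (50 + 10*20, 670 - 5*20) = (250, 570);
# canvas y grows downwards, hence the subtraction.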
point_coord_x = self._canvas_draw_origo[0] + self._point_dict[point_no][0] * self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - self._point_dict[point_no][1] * self._canvas_scale
return [point_coord_x, point_coord_y]
def controls(self):
'''
Specifying the controls to be used.
:return:
'''
self._main_canvas.bind('<Button-1>', self.left_click)
self._main_canvas.bind('<Button-2>', self.button_2_click)
self._main_canvas.bind('<Button-3>', self.right_click)
self._load_obj_box.bind('<Button-1>', self.left_click_load_box)
self._frame.bind('<Shift_L>', self.shift_pressed)
self._frame.bind('<Shift_R>', self.shift_pressed)
self._frame.bind('<Control_L>', self.ctrl_pressed)
self._frame.bind('<Control_R>', self.ctrl_pressed)
self._main_canvas.bind("<MouseWheel>", self.mouse_scroll)
self._main_canvas.bind("<B2-Motion>", self.button_2_click_and_drag)
def shift_pressed(self,event=None):
'''
Event is executed when shift key pressed.
:return:
'''
self._add_to_lines = True
def ctrl_pressed(self,event=None):
'''
Event when control is pressed.
:param event:
:return:
'''
self._add_to_lines = False
def button_2_click(self, event):
'''
Event when mouse button 2 (middle button) is clicked.
:param event:
:return:
'''
self._previous_drag_mouse = [event.x, event.y]
def left_click(self, event):
'''
When clicking the left mouse button, this method is called.
Used to select lines on the canvas.
'''
self._previous_drag_mouse = [event.x, event.y]
click_x = self._main_canvas.winfo_pointerx() - self._main_canvas.winfo_rootx()
click_y = self._main_canvas.winfo_pointery() - self._main_canvas.winfo_rooty()
stop = False
if len(self._line_dict) > 0:
for key, value in self._line_dict.items():
coord1x = self.get_point_canvas_coord('point' + str(value[0]))[0]
coord2x = self.get_point_canvas_coord('point' + str(value[1]))[0]
coord1y = self.get_point_canvas_coord('point' + str(value[0]))[1]
coord2y = self.get_point_canvas_coord('point' + str(value[1]))[1]
vector = [coord2x - coord1x, coord2y - coord1y]
click_x_range = [ix for ix in range(click_x - 10, click_x + 10)]
click_y_range = [iy for iy in range(click_y - 10, click_y + 10)]
distance = int(dist([coord1x, coord1y], [coord2x, coord2y]))
# checking along the line if the click is within +/- 10 pixels of the line
for dist_mult in range(1, distance - 1):
dist_mult = dist_mult / distance
x_check = int(coord1x) + int(round(vector[0] * dist_mult, 0))
y_check = int(coord1y) + int(round(vector[1] * dist_mult, 0))
if x_check in click_x_range and y_check in click_y_range:
self.line_is_active = True
if self._add_to_lines:
self._active_lines.append(key)
elif not self._add_to_lines:
if key in self._active_lines:
self._active_lines.remove(key)
self._main_canvas.delete('all')
break
self.draw_canvas()
def right_click(self,event):
'''
Event when right click.
:param event:
:return:
'''
self._previous_drag_mouse = [event.x, event.y]
self._active_lines = []
self._main_canvas.delete('all')
self.draw_canvas()
def left_click_load_box(self, *event):
'''
Load boxes consist of self._load_obj_box (active/non-active) and self._load_obj_lines_box (listing associated
lines). Both are tkinter Listbox objects.
:param events:
:return:
'''
self._load_obj_lines_box.delete(0,'end')
self._active_lines = []
if len(self._load_objects)!=0:
self._canvas_properties.delete('all')
current_selection = self._load_obj_box.get('active')
current_object = self._load_objects[current_selection][0]
current_lines = self._load_objects[current_selection][1]
self._new_assisiate_load.set(current_selection)
# drawing properties in the canvas
self._canvas_properties.create_text([140, 100], text=self._load_objects[current_selection][0])
for line in sorted([get_num(line) for line in current_lines]):
self._load_obj_lines_box.insert('end','line'+str(line))
self._active_lines.append('line'+str(line))
if current_object.is_static():
self._new_static_load_name.set(current_object.get_load_parmeters()[8])
self._new_static_draft.set(current_object.get_load_parmeters()[7])
self._new_static_condition.set(current_object.get_load_parmeters()[4])
else:
self._new_dynamic_load_name.set(current_object.get_load_parmeters()[8])
self._new_load_poly_third.set(current_object.get_load_parmeters()[0])
self._new_load_poly_second.set(current_object.get_load_parmeters()[1])
self._new_load_poly_first.set(current_object.get_load_parmeters()[2])
self._new_load_poly_const.set(current_object.get_load_parmeters()[3])
self._new_load_manual_pressure.set(current_object.get_load_parmeters()[6])
self._new_dyn_load_condition.set(current_object.get_load_parmeters()[4])
self._new_limit_state.set(current_object.get_load_parmeters()[9])
if current_object.get_load_parmeters()[4] == 'slamming':
self._new_slamming_pressure.set(current_object.get_load_parmeters()[3])
self._new_slamming_pressure_name.set(current_object.get_load_parmeters()[8])
self._new_slamming_pl_mult.set(current_object.get_load_parmeters()[10])
self._new_slamming_stf_mult.set(current_object.get_load_parmeters()[11])
self._load_obj_box.update()
self._ent_assosiate_load.update_idletasks()
self.draw_canvas(load_selected=True)
def import_update(self):
for load, data in self._load_objects.items():
self._load_obj_box.insert('end', load)
self._load_options.append(load)
self._ent_assosiate_load.destroy()
self._ent_assosiate_load = tk.OptionMenu(self._frame, self._new_assisiate_load, *tuple(self._load_options))
self._ent_assosiate_load.place(relx=0.85,rely=0.21)
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateLoadWindow(master=root)
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/load_window.py | load_window.py |
import time
from reportlab.lib.enums import TA_LEFT
from reportlab.platypus import Spacer
from reportlab.lib.styles import ParagraphStyle
from PIL import Image
from reportlab.lib.pagesizes import letter, A4, landscape
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import mm, inch
from reportlab.pdfgen import canvas
from reportlab.platypus import Image, Paragraph, Table
from time import strftime, gmtime
import os
from matplotlib import pyplot as plt
import matplotlib
cmap_sections = plt.get_cmap('jet')
from reportlab.platypus import SimpleDocTemplate, TableStyle
from reportlab.lib import colors
from matplotlib import colors as matplotlib_colors
import tkinter as tk
try:
import any_files.example_data as test
import any_files.helper as hlp
except ModuleNotFoundError:
import ANYstructure.any_files.example_data as test
import ANYstructure.any_files.helper as hlp
def create_report(input_data):
'''
This function uses the module REPORTLAB to generate a report.
:param input_data: dictionary with the keys 'lines', 'calc_structure', 'calc_fatigue' and 'pressures'
:return:
'''
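# Expected input (a sketch inferred from the loop below; values are illustrative):
# input_data = {'lines': {'line1': ...},
#               'calc_structure': {'line1': structure_object},
#               'calc_fatigue': {'line1': fatigue_object},
#               'pressures': {'line1': 100.0}}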
Story = []
file_name = "Report_current_results.pdf"
doc = SimpleDocTemplate(file_name, pagesize=letter,
rightMargin=72, leftMargin=72,
topMargin=72, bottomMargin=18)
# logo = "canvas_screenshot.gif"
formatted_time = time.ctime()
# im = Image(logo, 5 * inch, 4 * inch)
# Story.append(im)
styles = getSampleStyleSheet()
styles.add(ParagraphStyle(name='Justify',alignment=TA_LEFT))
ptext = '<font size=12>%s</font>' % formatted_time
Story.append(Paragraph(ptext, styles["Normal"]))
Story.append(Spacer(1, 12))
# Create return address
ptext = '<font size=12>%s</font>' % 'The results for the structure are shown here'
Story.append(Paragraph(ptext, styles["Justify"]))
Story.append(Spacer(1, 3))
ptext = '----------------------------------------------------------------------------------------------------------'
Story.append(Paragraph(ptext, styles["Justify"]))
Story.append(Spacer(5, 3))
for line in input_data['lines'].keys():
struc_obj = input_data['calc_structure'][line]
fat_obj = input_data['calc_fatigue'][line]
pressure = input_data['pressures'][line]
ptext = '<font size=12>' + 'Results for: '+str(line) + '</font>'
Story.append(Paragraph(ptext, styles["Justify"]))
ptext = '<font size=10>'+'Plate thickness: '+ str(struc_obj.Plate.get_pl_thk()*1000)+ ' [mm], Stiffener spacing: '+\
str(struc_obj.get_s()*1000)+' [mm]'+'</font>'
Story.append(Paragraph(ptext, styles["Justify"]))
ptext = '<font size=10>'+'Stiffener: '+ str(struc_obj.Stiffener.get_web_h()*1000)+ 'x' + str(struc_obj.Stiffener.get_web_thk()*1000) \
+ ' + ' + str(struc_obj.Stiffener.get_fl_w()*1000)+ 'x' + str(struc_obj.Stiffener.get_fl_thk()*1000) +'</font>'
Story.append(Paragraph(ptext, styles["Justify"]))
ptext = '<font size=10>'+struc_obj.get_report_stresses()+'</font>'
Story.append(Paragraph(ptext, styles["Justify"]))
ptext = '<font size=10>'+struc_obj.get_results_for_report()+'</font>'
Story.append(Paragraph(ptext, styles["Justify"]))
Story.append(Spacer(2, 3))
ptext = '<font size=12>END OF RESULTS</font>'
Story.append(Paragraph(ptext, styles["Justify"]))
# doc.build expects a canvas class via the canvasmaker argument (not an instance); the default canvas is used here
doc.build(Story)
class LetterMaker(object):
""""""
def __init__(self, pdf_file, org, seconds, data):
self.c = canvas.Canvas(pdf_file, pagesize=A4)
self.styles = getSampleStyleSheet()
self.width, self.height = A4
self.organization = org
self.seconds = seconds
self.data = data
self.draw_lines()
def createDocument(self):
""""""
voffset = 100
user = os.getlogin()
time_now = strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime())
# create return address
address = """<font size="12"><strong> ANYstructure report generator<br/></strong></font>""" + '<br/>' + \
"""<font size="12"> User: </font>""" + '<font size="12">' + user + '</font>' + '<br/>' + '<br/>' + \
"""<font size="12"> Time : </font>""" + '<font size="12">' + time_now + '</font>' + '<br/>'+ \
'<br/>'+'<font size="12">' + self.data._project_information.get('1.0', tk.END) + '</font>'
p = Paragraph(address, self.styles["Normal"])
# add a logo and size it
img_file_name = 'ANYstructure_logo.jpg'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = os.path.dirname(os.path.abspath(__file__)) + '/images/' + img_file_name
logo = Image(file_path)
logo.drawHeight = 1 * inch
logo.drawWidth = 2.5 * inch
data = [[p, logo]]
table = Table(data, colWidths=4 * inch)
table.setStyle([("VALIGN", (0, 0), (0, 0), "TOP")])
table.wrapOn(self.c, self.width, self.height)
table.drawOn(self.c, *self.coord(18, 50, mm))
self.draw_lines()
ptext = '<font size="12" color = "blue"><strong>' + "Compartments: " + '</strong></font>'
self.createParagraph(ptext, 10, voffset + 85)
delta = 0
h_start = 130
if self.data._tank_dict != {}:
for name, obj in self.data._tank_dict.items():
ptext = '<font size="7" color = "black">' + 'Name: '+ name + ', content: ' \
+ obj.get_content() + '</font>'
self.createParagraph(ptext, h_start, voffset + 100 + delta)
delta += 3
ptext = '<font size="7" color = "black">' + 'Min. elevation: ' + str(obj.get_lowest_elevation()) + \
', Max. elevation: ' + str(obj.get_highest_elevation()) + '</font>'
self.createParagraph(ptext, h_start, voffset + 100 + delta)
delta += 3
ptext = '<font size="7" color = "black">' + 'Applied overpressure: ' + str(obj.get_overpressure()) + \
'</font>'
self.createParagraph(ptext, h_start, voffset + 100 + delta)
delta += 3
ptext = '<font size="7" color = "black">'+'(a_stat, a_dyn_loa, a_dyn_bal): ' + \
str(obj.get_accelerations()) + '</font>'
self.createParagraph(ptext, h_start, voffset + 100 + delta)
delta += 4
try:
self.c.drawImage('current_comps.png', 10,50, width=350, height=250)
except OSError:
self.c.drawImage('current_comps_NONE.png', 10, 50, width=350, height=250)
# insert body of letter
self.c.showPage()
ptext = '<font size="12" color = "blue"><strong>' + "Results for defined structure: " + '</strong></font>'
self.createParagraph(ptext, 10, 0)
delta = 140 if self.data._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive' else 180
vpos = 950
for line in sorted(self.data._line_dict.keys()):
vpos -= delta
if line in self.data._line_to_struc.keys():
if self.data._line_to_struc[line][5] is None:
struc_obj = self.data._line_to_struc[line][0]
fo = self.data._line_to_struc[line][2]
pressure = self.data.get_highest_pressure(line)['normal']/1000
textobject = self.c.beginText()
textobject.setTextOrigin(30,vpos)
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine('*********** '+line+' ***********')
textobject.textLine('Plate thickness: '+ str(struc_obj.Plate.get_pl_thk()*1000)+ ' [mm] '
'Stiffener spacing: '+
str(struc_obj.Plate.get_s()*1000)+' [mm]'+ ' Span: '+
str(round(struc_obj.Plate.get_span(),4))
+ ' [m]')
if struc_obj.Stiffener is not None:
textobject.textLine('Stiffener: '+ str(struc_obj.Stiffener.get_web_h()*1000)+ 'x' +
str(struc_obj.Stiffener.get_web_thk()*1000)
+ ' + ' + str(struc_obj.Stiffener.get_fl_w()*1000)+ 'x' +
str(struc_obj.Stiffener.get_fl_thk()*1000))
textobject.textLine('Fixation parameters: kps = '+str(struc_obj.Plate.get_kps())+ ' kpp = '
+ str(struc_obj.Plate.get_kpp())+
', Bending moment factors km1/km2/km3 (support/field/support)' + ' = '+
str(int(struc_obj.Plate.get_km1()))+'/'+
str(int(struc_obj.Plate.get_km2()))+'/'+
str(int(struc_obj.Plate.get_km3())))
textobject.textLine('Defined stresses [MPa]: sigma_x1 = '+str(struc_obj.Plate.get_sigma_x1())+
' sigma_x2 = ' + str(struc_obj.Plate.get_sigma_x2()) +
' sigma_y1 = '+ str(struc_obj.Plate.get_sigma_y1()) +
' sigma_y2 = '+ str(struc_obj.Plate.get_sigma_y2()) +
' tau_xy = ' + str(struc_obj.Plate.get_tau_xy()))
textobject.textLine('ULS max pressure for line: '+ str(round(pressure,2)*1000)
+ ' [kPa]'+' Pressure applied at: '+struc_obj.overpressure_side)
if fo is not None:
textobject.textLine('Fatigue pressure [Pa]: '+' p_int:'+' loaded/ballast/part = '
+ str(round(self.data.get_color_and_calc_state()['pressure_fls'][line]['p_int']['loaded'],0))
+'/'+str(round(self.data.get_color_and_calc_state()['pressure_fls'][line]['p_int']['ballast'],0))
+'/'+str(round(self.data.get_color_and_calc_state()['pressure_fls'][line]['p_int']['part'],0))
+ ' p_ext:'+' loaded/ballast/part = '+
str(round(self.data.get_color_and_calc_state()['pressure_fls'][line]['p_ext']['loaded'],0))
+'/'+str(round(self.data.get_color_and_calc_state()['pressure_fls'][line]['p_ext']['ballast'],0))
+'/'+str(round(self.data.get_color_and_calc_state()['pressure_fls'][line]['p_ext']['part'],0)))
else:
textobject.textLine(' Fatigue pressure: No pressures defined')
textobject.setFillColor('red') if self.data.get_color_and_calc_state()['colors'][line]['section'] == 'red' \
else textobject.setFillColor('black')
textobject.textLine('Section modulus: '+str(int(min(self.data.get_color_and_calc_state()['section_modulus'][line]['sec_mod'])
*1000**3))+ ' [mm3]'+' Min. section modulus: '+
str(int(self.data.get_color_and_calc_state()['section_modulus'][line]['min_sec_mod']*1000**3))+' [mm3]'+
' -> ' + 'OK' if int(min(self.data.get_color_and_calc_state()['section_modulus'][line]['sec_mod'])*1000**3) >=
int(self.data.get_color_and_calc_state()['section_modulus'][line]['min_sec_mod']*1000**3)
else 'Section modulus: '+str(int(min(self.data.get_color_and_calc_state()['section_modulus'][line]['sec_mod'])
*1000**3))+ ' [mm3]'+ ' Min. section modulus: '+
str(int(self.data.get_color_and_calc_state()['section_modulus'][line]['min_sec_mod']*1000**3))+' [mm3]'+
' -> ' + 'NOT OK')
textobject.setFillColor('black')
textobject.setFillColor('red') if self.data.get_color_and_calc_state()['colors'][line]['thickness'] == 'red' \
else textobject.setFillColor('black')
textobject.textLine('Min plate thickness: '+
str(round(self.data.get_color_and_calc_state()['thickness'][line]['min_thk'],2)) + ' [mm] '
' -> ' +
'OK' if struc_obj.Plate.get_pl_thk()*1000 >=
self.data.get_color_and_calc_state()['thickness'][line]['min_thk'] else
'Min plate thickness: '+ str(round(
self.data.get_color_and_calc_state()['thickness'][line]['min_thk'],2)) + ' [mm] '
' -> '+'NOT OK')
textobject.setFillColor('black')
textobject.setFillColor('red') if self.data.get_color_and_calc_state()['colors'][line]['shear'] == 'red' \
else textobject.setFillColor('black')
textobject.textLine('Shear area: '+str(int(self.data.get_color_and_calc_state()['shear_area'][line]['shear_area']*1000**2))+' [mm2] '+
' Min shear area: '+str(int(self.data.get_color_and_calc_state()['shear_area'][line]['min_shear_area']*1000**2))
+ ' [mm2] ' +
' -> ' + 'OK' if self.data.get_color_and_calc_state()['shear_area'][line]['shear_area'] >=
self.data.get_color_and_calc_state()['shear_area'][line]['min_shear_area']
else 'Shear area: '+str(int(self.data.get_color_and_calc_state()['shear_area'][line]['shear_area']*1000**2))+
' [mm2] ' +
' Min shear area: '+str(int(self.data.get_color_and_calc_state()['shear_area'][line]['min_shear_area']*1000**2))
+ ' [mm2] ' + ' -> ' + 'NOT OK')
textobject.setFillColor('black')
if self.data._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
textobject.setFillColor('red') if self.data.get_color_and_calc_state()['colors'][line]['buckling'] == 'red' \
else textobject.setFillColor('black')
buc_util = list()
for key, val in self.data.get_color_and_calc_state()['buckling'][line].items():
for uf in val.values():
if type(uf) == list:
buc_util.append(uf[0])
buc_util.append(uf[1])
else:
buc_util.append(uf)
textobject.textLine('Highest buckling utilization DNV-RP-C201: '+
str(round(max(buc_util),2))+
' -> '+'OK' if max(buc_util) < 1 else
'Highest buckling utilization DNV-RP-C201: '+
str(round(max(buc_util),2))+' -> '+'NOT OK')
elif self.data._new_buckling_method.get() == 'DNV PULS':
if self.data._PULS_results is not None:
puls_method = self.data._line_to_struc[line][0].Plate.get_puls_method()
textobject.textLine('PULS results using '+str(puls_method) + ' utilization with acceptance '+
str(self.data._PULS_results.puls_acceptance))
if line in self.data._PULS_results.get_run_results().keys():
puls_buckling = self.data._PULS_results.get_run_results()[line]['Buckling strength']['Actual usage Factor'][0]
puls_ultimate = self.data._PULS_results.get_run_results()[line]['Ultimate capacity']['Actual usage Factor'][0]
if puls_buckling is not None:
if puls_method == 'buckling' and puls_buckling/self.data._PULS_results.puls_acceptance > 1:
textobject.setFillColor('red')
textobject.textLine('PULS buckling utilization = ' + str(puls_buckling))
textobject.setFillColor('black')
if puls_ultimate is not None:
if puls_method == 'ultimate' and puls_ultimate/self.data._PULS_results.puls_acceptance > 1:
textobject.setFillColor('red')
textobject.textLine('PULS ultimate utilization = ' + str(puls_ultimate))
textobject.setFillColor('black')
else:
puls_method = self.data._line_to_struc[line][0].Plate.get_puls_method()
textobject.textLine('ML-CL results using '+str(puls_method) + ' utilization with acceptance 0.87')
if line in self.data._PULS_results.get_run_results().keys():
ml_buckling = self.data.get_color_and_calc_state()['ML buckling class'][line]['buckling']
ml_ultimate = self.data.get_color_and_calc_state()['ML buckling class'][line]['ultimate']
color_ml_buc = self.data.get_color_and_calc_state()['ML buckling colors'][line]['buckling']
color_ml_ult = self.data.get_color_and_calc_state()['ML buckling colors'][line]['ultimate']
color_csr = self.data.get_color_and_calc_state()['ML buckling colors'][line]['CSR requirement']
if puls_method == 'buckling':
textobject.setFillColor('red' if color_ml_buc == 'red' else 'black')
textobject.textLine('Buckling ML-CL results: ' + self.data._ML_classes[ml_buckling])
textobject.setFillColor('black')
if puls_method == 'ultimate':
textobject.setFillColor('red' if color_ml_ult == 'red' else 'black')
textobject.textLine('Ultimate ML-CL result: ' + self.data._ML_classes[ml_ultimate])
textobject.setFillColor('red' if color_csr == 'red' else 'black')
textobject.textLine('CSR tank requirement (stiffener): ' +
('OK' if color_csr == 'green' else 'Not OK'))
textobject.setFillColor('black')
textobject.setFillColor('black')
textobject.setFillColor('red') if self.data.get_color_and_calc_state()['colors'][line]['fatigue'] == 'red' \
else textobject.setFillColor('black')
if self.data.get_color_and_calc_state()['fatigue'][line]['damage'] is not None:
textobject.textLine('Fatigue (plate/stiffeners) utilization: '+
str(round(self.data.get_color_and_calc_state()['fatigue'][line]['damage'],2))+ ' * DFF('+
str(self.data.get_color_and_calc_state()['fatigue'][line]['dff']) + ') = ' +
str(round(self.data.get_color_and_calc_state()['fatigue'][line]['damage']*
self.data.get_color_and_calc_state()['fatigue'][line]['dff'],2)) + ' (SN-curve = '+
self.data.get_color_and_calc_state()['fatigue'][line]['curve']+')')
else:
textobject.textLine('No fatigue results')
# textobject.textLine('Utilization percentage (highest calculated): '+
# str(int(max(self.data.get_color_and_calc_state()['utilization'][line].values())*100))+ '%')
textobject.setFillColor('black')
self.c.drawText(textobject)
vpos -= 10
else:
cyl_obj = self.data._line_to_struc[line][5]
textobject = self.c.beginText()
textobject.setTextOrigin(30, vpos)
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine('*********** ' + line + ' ***********')
textobject.textLine('Cylinder radius: ' + str(round(cyl_obj.ShellObj.radius*1000,2)) +
' mm , thickness: ' +str(round(cyl_obj.ShellObj.thk*1000,2)) + ' mm')
textobject.textLine('Longitudinal stiffener: ' + cyl_obj.LongStfObj.get_beam_string())
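# NOTE: the two lines below reuse LongStfObj when printing the ring stiffener
# and heavy ring girder strings; dedicated ring objects may be intended here.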
textobject.textLine('Ring stiffener: ' + cyl_obj.LongStfObj.get_beam_string())
textobject.textLine('Heavy ring girder: ' + cyl_obj.LongStfObj.get_beam_string())
textobject.textLine(
'Dist. between rings/length, l: ' + str(round(cyl_obj.ShellObj.dist_between_rings, 1)))
textobject.textLine(
'Length of shell, L: ' + str(round(cyl_obj.ShellObj.length_of_shell, 1)) + ' '
+'Total cyl. length, Lc: ' + str(round(cyl_obj.ShellObj.tot_cyl_length, 1)))
results = cyl_obj.get_utilization_factors()
textobject.textLine('Design axial stress/force: ' + str(cyl_obj.sasd / 1e6) + ' MPa')
textobject.textLine('Design bending stress/moment: ' + str(cyl_obj.smsd / 1e6) + ' MPa')
textobject.textLine('Design torsional stress/moment: ' + str(cyl_obj.tTsd / 1e6) + ' MPa')
textobject.textLine('Design shear stress/force: ' + str(cyl_obj.tQsd / 1e6) + ' MPa')
textobject.textLine('Design lateral pressure ' + str(cyl_obj.psd / 1e6) + ' MPa' )
textobject.textLine('Additional hoop stress ' + str(cyl_obj.shsd / 1e6) + ' MPa')
vpos -= 40
for key, value in results.items():
if key in ['Weight', 'Need to check column buckling']:
continue
if key not in ['Stiffener check', 'Stiffener check detailed']:
text_key = key
if key == 'Column stability check':
if results['Need to check column buckling'] == False:
continue
uf_text = 'N/A' if value is None else 'OK' if value else 'Not ok'
else:
uf_text = 'N/A' if value is None else str(round(value, 2))
if value is None:
uf_col = 'grey'
else:
uf_col = 'red' if any([value > 1, value == False]) else 'green'
textobject.setFillColor(uf_col)
textobject.textLine(text_key + ' : UF = ' + uf_text)
textobject.setFillColor('black')
elif key == 'Stiffener check':
if value is not None:
textobject.textLine('Stiffener requirement checks:')
stf_type_all = ''
for stf_type, chk_bool in value.items():
chk_text = 'OK' if chk_bool == True else 'Not OK' if chk_bool == False else 'N/A'
stf_type_all += stf_type + ' : ' + chk_text + ' '
textobject.textLine(stf_type_all)
vpos -= 10
self.c.drawText(textobject)
vpos += 10
else:
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine('*********** '+line+' ***********')
textobject.textLine('(no structural properties defined)')
if vpos <= 290:
self.c.showPage()
vpos = 950
# ----------------------------------------------------------------------
self.c.showPage()
self.draw_lines(draw_type='section')
self.c.showPage()
self.draw_lines(draw_type='plate')
self.c.showPage()
self.draw_lines(draw_type='pressure')
self.c.showPage()
self.draw_lines(draw_type='utilization')
self.c.showPage()
self.draw_lines(draw_type='sigma x')
self.c.showPage()
self.draw_lines(draw_type='sigma y1')
self.c.showPage()
self.draw_lines(draw_type='sigma y2')
self.c.showPage()
self.draw_lines(draw_type='tau xy')
self.c.showPage()
self.draw_lines(draw_type='structure type')
self.c.showPage()
idx, new = 0, False
for load_name in self.data._load_dict.keys():
self.draw_lines(draw_type=None, load_idx_name = [idx % 3, load_name])
if idx % 3 == 2:
self.c.showPage()
idx += 1
def draw_lines(self, draw_type = 'UF', load_idx_name = None):
'''
Draw the defined lines.
:return:
'''
points = self.data._point_dict
lines = self.data._line_dict
if self.data._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
colors = self.data.get_color_and_calc_state()['colors']
elif self.data._new_buckling_method.get() == 'DNV PULS':
colors = self.data.get_color_and_calc_state()['PULS colors']
else:
colors = self.data.get_color_and_calc_state()['ML buckling colors']
highest_y = max([coord[1] for coord in points.values()])
highest_x = max([coord[0] for coord in points.values()])
if any([highest_x == 0, highest_y == 0]):
scale = 10
elif load_idx_name is not None:
scale = 5
else:
scale = min(500/highest_y, 500/highest_x, 10)
if draw_type == 'UF':
origo = (50,350)
elif load_idx_name is not None:
origo = (50, 600 - 200*load_idx_name[0])
else:
origo = (50, 450)
self.c.setLineWidth(2)
self.c.setStrokeColor('red')
idx, drawed_data = 0, list()
all_line_data = self.data.get_color_and_calc_state()
for line, pt in lines.items():
if line not in list(self.data._line_to_struc.keys()):
continue
if draw_type == 'UF':
if self.data._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
try:
self.c.setStrokeColor('red' if 'red' in colors[line].values() else 'green')
except KeyError:
self.c.setStrokeColor('black')
elif self.data._new_buckling_method.get() == 'DNV PULS':
try:
method = self.data._line_to_struc[line][0].Plate.get_puls_method()
if self.data._PULS_results is not None:
util = self.data._PULS_results.get_utilization(line, method, self.data._new_puls_uf.get())
if util is not None:
self.c.setStrokeColor('red' if util > 1 else 'green')
except KeyError:
self.c.setStrokeColor('black')
else:
method = self.data._line_to_struc[line][0].Plate.get_puls_method()
self.c.setStrokeColor(colors[line][method])
elif draw_type == 'section' and self.data._line_to_struc[line][0].Stiffener is not None:
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['section'])
if self.data._line_to_struc[line][0].Stiffener.get_beam_string() not in drawed_data:
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(all_line_data['color code']['lines'][line]['section'])
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(self.data._line_to_struc[line][0].Stiffener.get_beam_string())
self.c.drawText(textobject)
drawed_data.append(self.data._line_to_struc[line][0].Stiffener.get_beam_string())
idx += 1
elif draw_type == 'plate':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['plate'])
elif draw_type == 'pressure':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['pressure color'])
elif draw_type == 'utilization':
if self.data._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['rp uf color'])
elif self.data._new_buckling_method.get() == 'DNV PULS':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['PULS uf color'])
else:
puls_method = self.data._line_to_struc[line][0].Plate.get_puls_method()
self.c.setStrokeColor(matplotlib_colors.rgb2hex(all_line_data['ML buckling colors'][line][puls_method]))
elif draw_type == 'sigma x':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['sigma x'])
elif draw_type == 'sigma y1':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['sigma y1'])
elif draw_type == 'sigma y2':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['sigma y2'])
elif draw_type == 'tau xy':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['tau xy'])
elif draw_type == 'structure type':
self.c.setStrokeColor(all_line_data['color code']['lines'][line]['structure type'])
elif load_idx_name is not None:
points = self.data._point_dict
highest_y = max([coord[1] for coord in points.values()])
highest_x = max([coord[0] for coord in points.values()])
if any([highest_x == 0, highest_y == 0]):
scale = 10
else:
scale = min(180 / highest_y, 300 / highest_x, 10)
if line in self.data._load_dict[load_idx_name[1]][1]:
self.c.setStrokeColor('orange')
else:
self.c.setStrokeColor('black')
x1, y1 = points['point'+str(pt[0])][0] * scale + origo[0], \
points['point'+str(pt[0])][1] * scale + origo[1]
x2, y2 = points['point'+str(pt[1])][0] * scale + origo[0], \
points['point'+str(pt[1])][1] * scale + origo[1]
self.c.line(x1,y1,x2,y2)
if load_idx_name is None:
textobject = self.c.beginText()
textobject.setTextOrigin(x1+(x2-x1)*0.5-5, y1 + (y2-y1)*0.5+2 )
textobject.setFont("Helvetica-Oblique", 9)
textobject.textLine(str(hlp.get_num(line)))
self.c.drawText(textobject)
if draw_type == 'UF':
pass
elif draw_type == 'section':
textobject = self.c.beginText()
textobject.setTextOrigin(50,800)
textobject.setFont("Helvetica-Oblique", 15)
textobject.setFillColor('black')
textobject.textLine('Model beam section properties')
self.c.drawText(textobject)
elif draw_type == 'plate':
textobject = self.c.beginText()
textobject.setTextOrigin(50, 800)
textobject.setFillColor('black')
textobject.setFont("Helvetica-Oblique", 15)
textobject.textLine('Model plate thicknesses')
self.c.drawText(textobject)
all_thicknesses =self.data.get_color_and_calc_state()['color code']['all thicknesses']
for idx, thk in enumerate(all_thicknesses):
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(all_thicknesses.index(thk)/
len(all_thicknesses))))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str(thk*1000) + ' mm')
self.c.drawText(textobject)
elif draw_type == 'pressure':
textobject = self.c.beginText()
textobject.setTextOrigin(50, 800)
textobject.setFillColor('black')
textobject.setFont("Helvetica-Oblique", 15)
textobject.textLine('Highest pressures for lines in model')
self.c.drawText(textobject)
idx = 0
pressure_map =self.data.get_color_and_calc_state()['color code']['pressure map']
for press in pressure_map:
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(list(pressure_map).index(press)/
len(list(pressure_map)))))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str(press))
self.c.drawText(textobject)
drawed_data.append(press)
idx += 1
elif draw_type == 'utilization':
textobject = self.c.beginText()
textobject.setTextOrigin(50, 800)
textobject.setFillColor('black')
textobject.setFont("Helvetica-Oblique", 12)
if self.data._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
this_text = 'DNV-RP-C201 Buckling Strength of Plated Structures'
elif self.data._new_buckling_method.get() == 'DNV PULS':
this_text = 'Utilization factors (max of all checks) - PULS (Panel Ultimate Limit State)'
else:
this_text = 'ML-CL utilization factors not available. The ML classifier only shows OK or not OK.'
textobject.textLine(this_text)
self.c.drawText(textobject)
if self.data._new_buckling_method.get() == 'DNV-RP-C201 - prescriptive':
all_utils = all_line_data['color code']['utilization map']
elif self.data._new_buckling_method.get() == 'DNV PULS':
all_utils = all_line_data['color code']['PULS utilization map']
else:
all_utils = list()
for idx, uf in enumerate(all_utils):
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(uf)))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str('UF = ' +str(round(uf,1))))
self.c.drawText(textobject)
elif draw_type == 'structure type':
textobject = self.c.beginText()
textobject.setTextOrigin(50,800)
textobject.setFont("Helvetica-Oblique", 15)
textobject.setFillColor('black')
textobject.textLine('Structure types')
self.c.drawText(textobject)
for idx, value in enumerate(list(all_line_data['color code']['structure types map'])):
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(
list(all_line_data['color code']['structure types map']).index(value)/
len(list(all_line_data['color code']['structure types map'])))))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str(value))
self.c.drawText(textobject)
drawed_data.append(value)
elif draw_type == 'sigma x':
textobject = self.c.beginText()
textobject.setTextOrigin(50,800)
textobject.setFont("Helvetica-Oblique", 15)
textobject.setFillColor('black')
textobject.textLine('Global stresses - sigma x')
self.c.drawText(textobject)
for idx, value in enumerate(list(all_line_data['color code']['sigma x map'])):
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(
list(all_line_data['color code']['sigma x map']).index(value)/
len(list(all_line_data['color code']['sigma x map'])))))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str(value))
self.c.drawText(textobject)
drawed_data.append(value)
elif draw_type == 'sigma y1':
textobject = self.c.beginText()
textobject.setTextOrigin(50,800)
textobject.setFont("Helvetica-Oblique", 15)
textobject.setFillColor('black')
textobject.textLine('Global stresses - sigma y1')
self.c.drawText(textobject)
for idx, value in enumerate(list(all_line_data['color code']['sigma y1 map'])):
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(
list(all_line_data['color code']['sigma y1 map']).index(value)/
len(list(all_line_data['color code']['sigma y1 map'])))))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str(value))
self.c.drawText(textobject)
drawed_data.append(value)
elif draw_type == 'sigma y2':
textobject = self.c.beginText()
textobject.setTextOrigin(50,800)
textobject.setFont("Helvetica-Oblique", 15)
textobject.setFillColor('black')
textobject.textLine('Global stresses - sigma y2')
self.c.drawText(textobject)
for idx, value in enumerate(list(all_line_data['color code']['sigma y2 map'])):
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(
list(all_line_data['color code']['sigma y2 map']).index(value)/
len(list(all_line_data['color code']['sigma y2 map'])))))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str(value))
self.c.drawText(textobject)
drawed_data.append(value)
elif draw_type == 'tau xy':
textobject = self.c.beginText()
textobject.setTextOrigin(50,800)
textobject.setFont("Helvetica-Oblique", 15)
textobject.setFillColor('black')
textobject.textLine('Global stresses - tau xy')
self.c.drawText(textobject)
for idx, value in enumerate(list(all_line_data['color code']['tau xy map'])):
textobject = self.c.beginText()
if 400 - 20 * idx > 20:
textobject.setTextOrigin(50, 400 - 20 * idx)
else:
textobject.setTextOrigin(300, 400 - 20 * idx)
textobject.setFillColor(matplotlib.colors.rgb2hex(cmap_sections(
list(all_line_data['color code']['tau xy map']).index(value)/
len(list(all_line_data['color code']['tau xy map'])))))
textobject.setFont("Helvetica-Oblique", 10)
textobject.textLine(str(value))
self.c.drawText(textobject)
drawed_data.append(value)
elif load_idx_name is not None:
for lidx, loadtext in enumerate(reversed(self.data._load_dict[load_idx_name[1]][0].get_report_string())):
textobject = self.c.beginText()
textobject.setTextOrigin(370 , origo[1]+50+ 11*lidx)
textobject.setFont("Helvetica-Oblique", 11)
textobject.setFillColor('black')
textobject.textLine(loadtext)
self.c.drawText(textobject)
def coord(self, x, y, unit=1):
"""
# http://stackoverflow.com/questions/4726011/wrap-text-in-a-table-reportlab
Helper class to help position flowables in Canvas objects
"""
x, y = x * unit, self.height - y * unit
return x, y
# ----------------------------------------------------------------------
def createParagraph(self, ptext, x, y, style=None):
""""""
if not style:
style = self.styles["Normal"]
p = Paragraph(ptext, style=style)
p.wrapOn(self.c, self.width, self.height)
p.drawOn(self.c, *self.coord(x, y, mm))
# ----------------------------------------------------------------------
def savePDF(self):
""""""
self.c.save()
# ----------------------------------------------------------------------
def createTable(self):
'''
Create a table of results for all lines.
'''
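# Lines are reported either as flat plate/stiffener panels or as cylinders; each type gets its own header row and columns below.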
table_all = []
cylinders, flat_plates, idx = False, False, -1
for line in sorted(self.data._line_dict.keys()):
idx += 1
if self.data._line_to_struc[line][5] is None and cylinders is False:
flat_plates = True
if idx == 0:
headers = ['Line', 'pl thk', 's', 'web h', 'web thk', 'fl. w', 'fl. thk', 'sig x1', 'sig x2', 'sig y1',
'sig y2', 'tau xy', 'max press.', 'sec. mod', 'min sec.', 'min plt',
'shr area', 'min shr A', 'fat uf', 'buc uf']
table_all.append(headers)
if line not in list(self.data._line_to_struc.keys()):
continue
struc_obj = self.data._line_to_struc[line][0]
pressure = round(self.data.get_highest_pressure(line)['normal'] / 1000,0)
if self.data._PULS_results is not None:
puls_method = self.data._line_to_struc[line][0].Plate.get_puls_method()
if line in self.data._PULS_results.get_run_results().keys():
if puls_method == 'buckling':
buckling_uf = \
self.data._PULS_results.get_run_results()[line]['Buckling strength']['Actual usage Factor'][0]
else:
buckling_uf = \
self.data._PULS_results.get_run_results()[line]['Ultimate capacity']['Actual usage Factor'][0]
else:
try:
buckling_uf = str(round(max(self.data.get_color_and_calc_state()['buckling'][line]), 2))
except TypeError:
buckling_uf = None
if self.data.get_color_and_calc_state()['fatigue'][line]['damage'] is not None:
fat_uf = self.data.get_color_and_calc_state()['fatigue'][line]['damage']
fat_uf = round(fat_uf, 3)
else:
fat_uf = self.data.get_color_and_calc_state()['fatigue'][line]['damage']
data = [line,str(struc_obj.Plate.get_pl_thk() * 1000), str(struc_obj.Plate.get_s() * 1000),
str('' if struc_obj.Stiffener is None else struc_obj.Stiffener.get_web_h() * 1000),
str('' if struc_obj.Stiffener is None else struc_obj.Stiffener.get_web_thk() * 1000),
str('' if struc_obj.Stiffener is None else struc_obj.Stiffener.get_fl_w() * 1000),
str('' if struc_obj.Stiffener is None else struc_obj.Stiffener.get_fl_thk() * 1000),
str(round(struc_obj.Plate.get_sigma_x1(), 0)), str(round(struc_obj.Plate.get_sigma_x2(), 0)),
str(round(struc_obj.Plate.get_sigma_y1(), 0)),
str(round(struc_obj.Plate.get_sigma_y2(), 0)),
str(round(struc_obj.Plate.get_tau_xy(), 0)), str(round(pressure, 2) * 1000),
str(int(min(self.data.get_color_and_calc_state()['section_modulus'][line]['sec_mod']) * 1000 ** 3)),
str(int(self.data.get_color_and_calc_state()['section_modulus'][line]['min_sec_mod'] * 1000 ** 3)),
str(round(self.data.get_color_and_calc_state()['thickness'][line]['min_thk'], 2)),
str(int(self.data.get_color_and_calc_state()['shear_area'][line]['shear_area'] * 1000 ** 2)),
str(int(self.data.get_color_and_calc_state()['shear_area'][line]['min_shear_area'] * 1000 ** 2)),
fat_uf, buckling_uf]
table_all.append(data)
elif not flat_plates:
cylinders = True
if idx == 0:
headers = ['Line', 'Radius', 'Thickness', 'Span', 'Tot. length',
'Axial stress', 'Bend stress', 'Tors. stress', 'Shear stress', 'Lat. press.',
'Hoop stress',
'UF shell', 'UF long. stf.', 'UF ring. stf.', 'UF girder']
table_all.append(headers)
cyl_obj = self.data._line_to_struc[line][5]
radius = round(cyl_obj.ShellObj.radius * 1000, 2)
thickness = round(cyl_obj.ShellObj.thk * 1000, 2)
long_str = cyl_obj.LongStfObj.get_beam_string()
ring_stf = cyl_obj.LongStfObj.get_beam_string()
heavy_ring = cyl_obj.LongStfObj.get_beam_string()
span = round(cyl_obj.ShellObj.dist_between_rings, 1)
tot_length = round(cyl_obj.ShellObj.length_of_shell, 1)
tot_cyl = round(cyl_obj.ShellObj.tot_cyl_length, 1)
sigma_axial = cyl_obj.sasd / 1e6
sigma_bend = cyl_obj.smsd / 1e6
sigma_tors = cyl_obj.tTsd / 1e6
tau_xy = cyl_obj.tQsd / 1e6
lat_press = cyl_obj.psd / 1e6
sigma_hoop = cyl_obj.shsd / 1e6
results = cyl_obj.get_utilization_factors()
data = [line, radius, thickness, span, tot_length,
sigma_axial,
sigma_bend,
sigma_tors,
tau_xy,
lat_press,
sigma_hoop,
round(0 if results['Unstiffened shell'] is None else results['Unstiffened shell'],2),
round(0 if results['Longitudinal stiffened shell'] is None else results['Longitudinal stiffened shell'],2),
round(0 if results['Ring stiffened shell'] is None else results['Ring stiffened shell'],2),
round(0 if results['Heavy ring frame'] is None else results['Heavy ring frame'],2)
]
table_all.append([str(data_item) for data_item in data])
if cylinders:
t = Table(table_all,colWidths=[0.7*inch])
t.setStyle(TableStyle([
('GRID', (0, 0), (-1, -1), 0.5, colors.gray),
('BACKGROUND', (0, 0), (-1, -1), colors.lightblue),
('FONTSIZE', (0, 0), (-1, -1), 8),
('FONTSIZE', (0, 4), (-1, 4), 8),
('FONTNAME', (0, 1), (-1, 1), 'Times-Bold'),
('FONTNAME', (0, 4), (-1, 4), 'Times-Bold'),
]))
else:
t = Table(table_all,colWidths=[0.55*inch])
t.setStyle(TableStyle([
('GRID', (0, 0), (-1, -1), 0.5, colors.gray),
('BACKGROUND', (0, 0), (-1, -1), colors.lightblue),
('FONTSIZE', (0, 0), (-1, -1), 8),
('FONTSIZE', (0, 4), (-1, 4), 8),
('FONTNAME', (0, 1), (-1, 1), 'Times-Bold'),
('FONTNAME', (0, 4), (-1, 4), 'Times-Bold'),
]))
return [t,]
if __name__ == '__main__':
import multiprocessing, ctypes, tkinter
import main_application as app
multiprocessing.freeze_support()
errorCode = ctypes.windll.shcore.SetProcessDpiAwareness(2)
root = tkinter.Tk()
my_app = app.Application(root)
ship_example = r'C:\Github\ANYstructure\ship_section_example.txt'
my_app.openfile(ship_example)
#
my_app.table_generate()
#my_app.report_generate(autosave=True)
# doc = LetterMaker("example.pdf", "The MVP", 10, to_report_gen)
# doc.createDocument()
# doc.savePDF() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/report_generator.py | report_generator.py |
import tkinter as tk
from _tkinter import TclError
from tkinter.ttk import Progressbar
from tkinter import messagebox
from tkinter.filedialog import askopenfilenames
from multiprocessing import Pool, cpu_count
try:
import any_files.main_application
import any_files.optimize as op
import any_files.example_data as test
from any_files.calc_structure import *
import any_files.calc_structure as calc
from any_files.helper import *
import any_files.optimize as opt
except ModuleNotFoundError:
import ANYstructure.any_files.main_application
import ANYstructure.any_files.optimize as op
import ANYstructure.any_files.example_data as test
from ANYstructure.any_files.calc_structure import *
import ANYstructure.any_files.calc_structure as calc
from ANYstructure.any_files.helper import *
import ANYstructure.any_files.optimize as opt
def helper_harmonizer_multi(iterator):
'''
Check one candidate dimension set against all selected lines (run in a multiprocessing pool).
:param iterator: dict with 'x' (candidate dimensions) and 'info' (per-line pressures, objects and checks).
:return: the candidate as a tuple if all lines pass the checks, otherwise None.
'''
this_check, master_x = list(), None
for slave_line in iterator['info']['lines']:
lateral_press = iterator['info'][slave_line]['lateral pressure']
fat_press = iterator['info'][slave_line]['fatigue pressure']
fat_obj = iterator['info'][slave_line]['fatigue object']
slamming_pressure = iterator['info'][slave_line]['slamming pressure']
chk_calc_obj = iterator['info'][slave_line]['chk_calc_obj']
master_x = list(iterator['x'])
ml_cl = iterator['info'][slave_line]['ML-CL']
fup = iterator['info'][slave_line]['fup']
fdwn = iterator['info'][slave_line]['fdwn']
if iterator['info']['keep spacing']:
x = [chk_calc_obj.get_s()] + master_x[1:] + [chk_calc_obj.get_span(), chk_calc_obj.get_lg()]
else:
x = master_x + [chk_calc_obj.get_span(), chk_calc_obj.get_lg()]
chk_any = op.any_constraints_all(x=x, obj=chk_calc_obj, lat_press=lateral_press,
init_weight=float('inf'), side='p', chk=iterator['info']['checks'],
fat_dict=None if fat_obj is None else fat_obj.get_fatigue_properties(),
fat_press=fat_press, slamming_press=slamming_pressure,PULSrun=None,
print_result=False,fdwn=fdwn, fup=fup, ml_results=ml_cl)
this_check.append(chk_any[0])
if all(this_check) and master_x is not None:
return tuple(master_x)
else:
return None
class CreateOptimizeMultipleWindow():
'''
This class initiates the MultiOpt window.
'''
def __init__(self, master, app=None):
super(CreateOptimizeMultipleWindow, self).__init__()
if __name__ == '__main__':
import pickle
self._load_objects = {}
self._load_comb_dict = {}
self._line_dict = test.get_line_dict()
self._load_count = 0
self._point_dict = test.get_point_dict()
self._canvas_scale = 25
self._line_to_struc = test.get_line_to_struc()
self._slamming_pressure = test.get_slamming_pressure()
self._fatigue_pressure = test.get_fatigue_pressures()
self._fatigue_object = test.get_fatigue_object()
self._normal_pressure = test.get_random_pressure()
image_dir = os.path.dirname(__file__)+'\\images\\'
self._active_lines = []
self._ML_buckling = dict() # Buckling machine learning algorithm
for name, file_base in zip(['cl SP buc int predictor', 'cl SP buc int scaler',
'cl SP ult int predictor', 'cl SP ult int scaler',
'cl SP buc GLGT predictor', 'cl SP buc GLGT scaler',
'cl SP ult GLGT predictor', 'cl SP ult GLGT scaler',
'cl UP buc int predictor', 'cl UP buc int scaler',
'cl UP ult int predictor', 'cl UP ult int scaler',
'cl UP buc GLGT predictor', 'cl UP buc GLGT scaler',
'cl UP ult GLGT predictor', 'cl UP ult GLGT scaler'
],
["ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_UP",
"CL_CSR-Tank_req_cl_predictor",
"CL_CSR-Tank_req_cl_UP_scaler",
"CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_predictor",
"CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_SP_scaler"]):
self._ML_buckling[name] = None
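# Load each pickled classifier/scaler only if its file exists; otherwise the entry stays None.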
if os.path.isfile(file_base + '.pickle'):
file = open(file_base + '.pickle', 'rb')
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler
self._ML_buckling[name] = pickle.load(file)
file.close()
else:
self.app = app
self._load_objects = app._load_dict
self._load_comb_dict = app._new_load_comb_dict
self._line_dict = app._line_dict
self._load_count = 0
self._point_dict = app._point_dict
self._canvas_scale = app._canvas_scale
self._line_to_struc = app._line_to_struc
image_dir = app._root_dir + '\\images\\'
self._root_dir = app._root_dir
self._active_lines = app._multiselect_lines
self._ML_buckling = app._ML_buckling
self._frame = master
self._frame.wm_title("Optimize structure")
self._frame.geometry('1800x950')
self._frame.grab_set()
self._canvas_origo = (50, 720 - 50)
self._canvas_base_origo = self._canvas_origo
self._canvas_draw_origo = list(self._canvas_base_origo)
self._previous_drag_mouse = list(self._canvas_draw_origo)
self._active_lines = []
self._add_to_lines = True
self._lines_add_to_load = []
self._mid_click_line = None
self._predefined_structure = None
# ----------------------------------COPIED FROM OPTIMIZE_WINDOW-----------------------------------------------
self._opt_results = {}
self._opt_actual_running_time = tk.Label(self._frame, text='')
tk.Frame(self._frame, width=770, height=5, bg="grey", colormap="new").place(x=20, y=95)
tk.Frame(self._frame, width=770, height=5, bg="grey", colormap="new").place(x=20, y=135)
algorithms = ('anysmart', 'random', 'random_no_delta')
tk.Label(self._frame, text='-- Structural optimizer for multiple selections --',
font='Verdana 15 bold').place(x=10, y=10)
# Upper and lower bounds for the optimization variables, e.g. [0.6, 0.012, 0.3, 0.01, 0.1, 0.01],
# ordered as [spacing, plate thk, web height, web thk, flange width, flange thk] in metres.
self._new_spacing_upper = tk.DoubleVar()
self._new_spacing_lower = tk.DoubleVar()
self._new_pl_thk_upper = tk.DoubleVar()
self._new_pl_thk_lower = tk.DoubleVar()
self._new_web_h_upper = tk.DoubleVar()
self._new_web_h_lower = tk.DoubleVar()
self._new_web_thk_upper = tk.DoubleVar()
self._new_web_thk_lower = tk.DoubleVar()
self._new_fl_w_upper = tk.DoubleVar()
self._new_fl_w_lower = tk.DoubleVar()
self._new_fl_thk_upper = tk.DoubleVar()
self._new_fl_thk_lower = tk.DoubleVar()
self._new_span = tk.DoubleVar()
self._new_width_lg = tk.DoubleVar()
self._new_algorithm = tk.StringVar()
self._new_algorithm_random_trials = tk.IntVar()
self._new_delta_spacing = tk.DoubleVar()
self._new_delta_pl_thk = tk.DoubleVar()
self._new_delta_web_h = tk.DoubleVar()
self._new_delta_web_thk = tk.DoubleVar()
self._new_delta_fl_w = tk.DoubleVar()
self._new_delta_fl_thk = tk.DoubleVar()
self._new_swarm_size = tk.IntVar()
self._new_omega = tk.DoubleVar()
self._new_phip = tk.DoubleVar()
self._new_phig = tk.DoubleVar()
self._new_maxiter = tk.IntVar()
self._new_minstep = tk.DoubleVar()
self._new_minfunc = tk.DoubleVar()
ent_w = 10
self._ent_spacing_upper = tk.Entry(self._frame, textvariable=self._new_spacing_upper, width=ent_w)
self._ent_spacing_lower = tk.Entry(self._frame, textvariable=self._new_spacing_lower, width=ent_w)
self._ent_pl_thk_upper = tk.Entry(self._frame, textvariable=self._new_pl_thk_upper, width=ent_w)
self._ent_pl_thk_lower = tk.Entry(self._frame, textvariable=self._new_pl_thk_lower, width=ent_w)
self._ent_web_h_upper = tk.Entry(self._frame, textvariable=self._new_web_h_upper, width=ent_w)
self._ent_web_h_lower = tk.Entry(self._frame, textvariable=self._new_web_h_lower, width=ent_w)
self._ent_web_thk_upper = tk.Entry(self._frame, textvariable=self._new_web_thk_upper, width=ent_w)
self._ent_web_thk_lower = tk.Entry(self._frame, textvariable=self._new_web_thk_lower, width=ent_w)
self._ent_fl_w_upper = tk.Entry(self._frame, textvariable=self._new_fl_w_upper, width=ent_w)
self._ent_fl_w_lower = tk.Entry(self._frame, textvariable=self._new_fl_w_lower, width=ent_w)
self._ent_fl_thk_upper = tk.Entry(self._frame, textvariable=self._new_fl_thk_upper, width=ent_w)
self._ent_fl_thk_lower = tk.Entry(self._frame, textvariable=self._new_fl_thk_lower, width=ent_w)
self._ent_span = tk.Entry(self._frame, textvariable=self._new_span, width=ent_w)
self._ent_width_lg = tk.Entry(self._frame, textvariable=self._new_width_lg, width=ent_w)
self._ent_algorithm = tk.OptionMenu(self._frame, self._new_algorithm, command=self.selected_algorithm, *algorithms)
self._ent_random_trials = tk.Entry(self._frame, textvariable=self._new_algorithm_random_trials)
self._ent_delta_spacing = tk.Entry(self._frame, textvariable=self._new_delta_spacing, width=ent_w)
self._ent_delta_pl_thk = tk.Entry(self._frame, textvariable=self._new_delta_pl_thk, width=ent_w)
self._ent_delta_web_h = tk.Entry(self._frame, textvariable=self._new_delta_web_h, width=ent_w)
self._ent_delta_web_thk = tk.Entry(self._frame, textvariable=self._new_delta_web_thk, width=ent_w)
self._ent_delta_fl_w = tk.Entry(self._frame, textvariable=self._new_delta_fl_w, width=ent_w)
self._ent_delta_fl_thk = tk.Entry(self._frame, textvariable=self._new_delta_fl_thk, width=ent_w)
pso_width = 10
self._ent_swarm_size = tk.Entry(self._frame,textvariable=self._new_swarm_size, width = pso_width)
self._ent_omega = tk.Entry(self._frame,textvariable=self._new_omega, width = pso_width)
self._ent_phip = tk.Entry(self._frame,textvariable=self._new_phip, width = pso_width)
self._ent_phig = tk.Entry(self._frame,textvariable=self._new_phig, width = pso_width)
self._ent_maxiter = tk.Entry(self._frame,textvariable=self._new_maxiter, width = pso_width)
self._ent_minstep = tk.Entry(self._frame,textvariable=self._new_minstep, width = pso_width)
self._ent_minfunc = tk.Entry(self._frame,textvariable=self._new_minfunc, width = pso_width)
start_x, start_y, dx, dy = 20, 70, 100, 40
self._new_processes = tk.IntVar()
self._new_processes.set(max(cpu_count() - 1, 1))
tk.Label(self._frame, text='Processes\n (CPUs)', font='Verdana 9 bold', bg = 'silver')\
.place(x=start_x + 12.3 * dx, y=start_y - 0.2 * dy)
tk.Entry(self._frame, textvariable=self._new_processes, width = 12, bg = 'silver')\
.place(x=start_x + 12.3 * dx, y=start_y + 0.7* dy)
self._prop_canvas_dim = (500, 450)
self._draw_scale = 500
self._canvas_opt = tk.Canvas(self._frame, width=self._prop_canvas_dim[0], height=self._prop_canvas_dim[1],
background='azure',relief='groove', borderwidth=2)
self._canvas_opt.place(x=start_x+10.5*dx, y=start_y+3.5*dy)
self._select_canvas_dim = (1000, 720)
self._canvas_select = tk.Canvas(self._frame, width=self._select_canvas_dim[0], height=self._select_canvas_dim[1],
background='azure',relief='groove', borderwidth=2)
self._canvas_select.place(x=start_x+0*dx, y=start_y+3.5*dy)
# Labels for the pso
self._lb_swarm_size = tk.Label(self._frame, text='swarm size')
self._lb_omega = tk.Label(self._frame, text='omega')
self._lb_phip = tk.Label(self._frame, text='phip')
self._lb_phig = tk.Label(self._frame, text='phig')
self._lb_maxiter = tk.Label(self._frame, text='maxiter')
self._lb_minstep = tk.Label(self._frame, text='minstep')
self._lb_minfunc = tk.Label(self._frame, text='minfunc')
tk.Label(self._frame, text='Upper bounds [mm]', font='Verdana 9').place(x=start_x, y=start_y)
tk.Label(self._frame, text='Iteration delta [mm]', font='Verdana 9').place(x=start_x, y=start_y + dy)
tk.Label(self._frame, text='Lower bounds [mm]', font='Verdana 9').place(x=start_x, y=start_y + 2 * dy)
tk.Label(self._frame, text='Spacing [mm]', font='Verdana 7 bold').place(x=start_x + 1.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Plate thk. [mm]', font='Verdana 7 bold').place(x=start_x + 2.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Web height [mm]', font='Verdana 7 bold').place(x=start_x + 3.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Web thk. [mm]', font='Verdana 7 bold').place(x=start_x + 4.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Flange width [mm]', font='Verdana 7 bold').place(x=start_x + 5.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Flange thk. [mm]', font='Verdana 7 bold').place(x=start_x + 6.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Estimated running time for algorithm: ',
font='Verdana 9 bold').place(x=start_x, y=start_y + 2.8 * dy)
self._runnig_time_label = tk.Label(self._frame, text='', font='Verdana 9 bold')
self._runnig_time_label.place(x=start_x + 2.7 * dx, y=start_y + 2.8 * dy)
tk.Label(self._frame, text='seconds ', font='Verdana 9 bold').place(x=start_x + 3.3 * dx, y=start_y + 2.8 * dy)
self._result_label = tk.Label(self._frame, text='', font='Verdana 9 bold')
self._result_label.place(x=start_x, y=start_y + 4 * dy)
self._ent_spacing_upper.place(x=start_x + dx * 2, y=start_y)
self._ent_delta_spacing.place(x=start_x + dx * 2, y=start_y + dy)
self._ent_spacing_lower.place(x=start_x + dx * 2, y=start_y + 2 * dy)
self._ent_pl_thk_upper.place(x=start_x + dx * 3, y=start_y)
self._ent_delta_pl_thk.place(x=start_x + dx * 3, y=start_y + dy)
self._ent_pl_thk_lower.place(x=start_x + dx * 3, y=start_y + 2 * dy)
self._ent_web_h_upper.place(x=start_x + dx * 4, y=start_y)
self._ent_delta_web_h.place(x=start_x + dx * 4, y=start_y + dy)
self._ent_web_h_lower.place(x=start_x + dx * 4, y=start_y + 2 * dy)
self._ent_web_thk_upper.place(x=start_x + dx * 5, y=start_y)
self._ent_delta_web_thk.place(x=start_x + dx * 5, y=start_y + dy)
self._ent_web_thk_lower.place(x=start_x + dx * 5, y=start_y + 2 * dy)
self._ent_fl_w_upper.place(x=start_x + dx * 6, y=start_y)
self._ent_delta_fl_w.place(x=start_x + dx * 6, y=start_y + dy)
self._ent_fl_w_lower.place(x=start_x + dx * 6, y=start_y + 2 * dy)
self._ent_fl_thk_upper.place(x=start_x + dx * 7, y=start_y)
self._ent_delta_fl_thk.place(x=start_x + dx * 7, y=start_y + dy)
self._ent_fl_thk_lower.place(x=start_x + dx * 7, y=start_y + 2 * dy)
# setting default values
init_dim = float(10) # mm
init_thk = float(1) # mm
self._new_delta_spacing.set(init_dim)
self._new_delta_pl_thk.set(init_thk)
self._new_delta_web_h.set(init_dim)
self._new_delta_web_thk.set(init_thk)
self._new_delta_fl_w.set(init_dim)
self._new_delta_fl_thk.set(init_thk)
self._new_spacing_upper.set(round(800, 5))
self._new_spacing_lower.set(round(600, 5))
self._new_pl_thk_upper.set(round(25, 5))
self._new_pl_thk_lower.set(round(15, 5))
self._new_web_h_upper.set(round(500, 5))
self._new_web_h_lower.set(round(400, 5))
self._new_web_thk_upper.set(round(20, 5))
self._new_web_thk_lower.set(round(10, 5))
self._new_fl_w_upper.set(round(200, 5))
self._new_fl_w_lower.set(round(100, 5))
self._new_fl_thk_upper.set(round(30, 5))
self._new_fl_thk_lower.set(round(15, 5))
self._new_algorithm.set('anysmart')
self._new_algorithm_random_trials.set(10000)
# Selection of constraints
self._new_check_sec_mod = tk.BooleanVar()
self._new_check_min_pl_thk = tk.BooleanVar()
self._new_check_shear_area = tk.BooleanVar()
self._new_check_buckling = tk.BooleanVar()
self._new_check_fatigue = tk.BooleanVar()
self._new_check_slamming = tk.BooleanVar()
self._new_check_local_buckling = tk.BooleanVar()
self._new_check_ml_buckling = tk.BooleanVar()
self._new_harmonizer = tk.BooleanVar()
self._keep_spacing = tk.BooleanVar()
self._new_check_sec_mod.set(True)
self._new_check_min_pl_thk.set(True)
self._new_check_shear_area.set(True)
self._new_check_buckling.set(True)
self._new_check_fatigue.set(True)
self._new_check_slamming.set(False)
self._new_check_local_buckling.set(True)
self._new_harmonizer.set(False)
self._keep_spacing.set(False)
self._new_check_ml_buckling.set(False)
self._new_swarm_size.set(100)
self._new_omega.set(0.5)
self._new_phip.set(0.5)
self._new_phig.set(0.5)
self._new_maxiter.set(100)
self._new_minstep.set(1e-8)
self._new_minfunc.set(1e-8)
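# Re-estimate the running time whenever a bound, delta or algorithm selection changes.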
self._new_delta_spacing.trace('w', self.update_running_time)
self._new_delta_pl_thk.trace('w', self.update_running_time)
self._new_delta_web_h.trace('w', self.update_running_time)
self._new_delta_web_thk.trace('w', self.update_running_time)
self._new_delta_fl_w.trace('w', self.update_running_time)
self._new_delta_fl_thk.trace('w', self.update_running_time)
self._new_spacing_upper.trace('w', self.update_running_time)
self._new_spacing_lower.trace('w', self.update_running_time)
self._new_pl_thk_upper.trace('w', self.update_running_time)
self._new_pl_thk_lower.trace('w', self.update_running_time)
self._new_web_h_upper.trace('w', self.update_running_time)
self._new_web_h_lower.trace('w', self.update_running_time)
self._new_web_thk_upper.trace('w', self.update_running_time)
self._new_web_thk_lower.trace('w', self.update_running_time)
self._new_fl_w_upper.trace('w', self.update_running_time)
self._new_fl_w_lower.trace('w', self.update_running_time)
self._new_fl_thk_upper.trace('w', self.update_running_time)
self._new_fl_thk_lower.trace('w', self.update_running_time)
self._new_algorithm_random_trials.trace('w', self.update_running_time)
self._new_algorithm.trace('w', self.update_running_time)
self._keep_spacing.trace('w',self.trace_keep_spacing_check)
self._new_check_ml_buckling.trace('w', self.update_running_time)
self.running_time_per_item = 1.009943181818182e-5
self._runnig_time_label.config(text=str(self.get_running_time()))
tk.Label(self._frame, text='Select algorithm type --->', font='Verdana 8 bold').place(x=start_x + dx * 8,
y=start_y + 1 * dy)
self._ent_algorithm.place(x=start_x + dx * 10, y=start_y + dy)
self.algorithm_random_label = tk.Label(self._frame, text='Number of trials')
tk.Button(self._frame, text='algorithm information', command=self.algorithm_info, bg='white') \
.place(x=start_x + dx * 15, y=start_y + dy *-0.5)
self.run_button = tk.Button(self._frame, text='RUN OPTIMIZATION!', command=self.run_optimizaion, bg='red',
font='Verdana 10', fg='Yellow')
self.run_button.place(x=start_x + dx * 8, y=start_y)
self.run_results = tk.Button(self._frame,text='show calculated', command=self.plot_results, bg='white',
font='Verdana 10',fg='black')
self.run_results.place(x=start_x+dx*8, y=start_y+dy*1.5)
self._opt_actual_running_time.place(x=start_x + dx * 8, y=start_y - dy * 1.5)
self.close_and_save = tk.Button(self._frame, text='Return and replace with selected optimized structure',
command=self.save_and_close, bg='green', font='Verdana 10 bold', fg='yellow')
self.close_and_save.place(x=start_x + dx * 10, y=10)
tk.Button(self._frame, text='Open predefined stiffeners example',
command=self.open_example_file, bg='white', font='Verdana 10')\
.place(x=start_x+dx*15,y=10)
start_y, start_x, dy = 530, 100, 35
tk.Label(self._frame,text='Check for minimum section modulus').place(x=start_x+dx*9.7,y=start_y+4*dy)
tk.Label(self._frame, text='Check for minimum plate thk.').place(x=start_x+dx*9.7,y=start_y+5*dy)
tk.Label(self._frame, text='Check for minimum shear area').place(x=start_x+dx*9.7,y=start_y+6*dy)
tk.Label(self._frame, text='Check for buckling (RP-C201)').place(x=start_x+dx*9.7,y=start_y+7*dy)
tk.Label(self._frame, text='Check for fatigue (RP-C203)').place(x=start_x + dx * 9.7, y=start_y + 8 * dy)
tk.Label(self._frame, text='Check for bow slamming').place(x=start_x + dx * 9.7, y=start_y + 9 * dy)
tk.Label(self._frame, text='Check for local stf. buckling').place(x=start_x + dx * 9.7, y=start_y + 10 * dy)
tk.Label(self._frame, text='Check for buckling (ML-CL)').place(x=start_x + dx * 9.7, y=start_y + 11 * dy)
tk.Label(self._frame, text='Check to harmonize results. Same stiffener and plate dimensions '
'(defined by largest in opt).', font='Verdana 10 bold')\
.place(x=start_x + dx * +8.5, y=start_y - 10.5 * dy)
tk.Label(self._frame, text='Check to skip iterating over spacing (respective line spacing used).',
font='Verdana 10 bold')\
.place(x=start_x + dx * +8.5, y=start_y - 9.8 * dy)
tk.Checkbutton(self._frame,variable=self._new_check_sec_mod).place(x=start_x+dx*12,y=start_y+4*dy)
tk.Checkbutton(self._frame, variable=self._new_check_min_pl_thk).place(x=start_x+dx*12,y=start_y+5*dy)
tk.Checkbutton(self._frame, variable=self._new_check_shear_area).place(x=start_x+dx*12,y=start_y+6*dy)
tk.Checkbutton(self._frame, variable=self._new_check_buckling).place(x=start_x+dx*12,y=start_y+7*dy)
tk.Checkbutton(self._frame, variable=self._new_check_fatigue).place(x=start_x + dx * 12, y=start_y + 8 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_slamming).place(x=start_x + dx * 12, y=start_y + 9 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_local_buckling).place(x=start_x + dx * 12,
y=start_y + 10 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_ml_buckling).place(x=start_x + dx * 12,
y=start_y + 11 * dy)
tk.Checkbutton(self._frame, variable=self._new_harmonizer).place(x=start_x + dx * 8, y=start_y - 10.5 * dy)
tk.Checkbutton(self._frame, variable=self._keep_spacing).place(x=start_x + dx * +8, y=start_y - 9.8 * dy)
self._toggle_btn = tk.Button(self._frame, text="Iterate predefined stiffeners", relief="raised",
command=self.toggle, bg = 'salmon')
self._toggle_btn.place(x=start_x+dx*9, y=start_y - dy * 13)
self._toggle_object, self._filez = None, None
# Stress scaling
self._new_fup = tk.DoubleVar()
self._new_fup.set(0.5)
self._new_fdwn = tk.DoubleVar()
self._new_fdwn.set(1)
tk.Label(self._frame, text='Factor when scaling stresses up, fup')\
.place(x=start_x + dx * 13, y=start_y + 5 * dy)
ent_fup = tk.Entry(self._frame, textvariable=self._new_fup, width = 10)
ent_fup.place(x=start_x + dx * 15.5, y=start_y + 5 * dy)
tk.Label(self._frame, text='Factor when scaling stresses down, fdwn')\
.place(x=start_x + dx * 13, y=start_y + 6 * dy)
ent_fdwn = tk.Entry(self._frame, textvariable=self._new_fdwn, width = 10)
ent_fdwn.place(x=start_x + dx * 15.5, y=start_y + 6 * dy)
self.draw_properties()
# ----------------------------------END OF OPTIMIZE SINGLE COPY-----------------------------------------------
self.progress_count = tk.IntVar()
self.progress_count.set(0)
self.progress_bar = Progressbar(self._frame, orient="horizontal",length=200, mode="determinate",
variable=self.progress_count)
self.progress_bar.place(x=start_x+dx*10.5,y=start_y-dy*11.5)
self.controls()
self.draw_select_canvas()
self._harmonizer_data = {}
def trace_keep_spacing_check(self, *args):
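''' Color the spacing bound entries red when iteration over spacing is switched off. '''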
if self._keep_spacing.get():
self._ent_spacing_lower.configure({"background": "red"})
self._ent_delta_spacing.configure({"background": "red"})
self._ent_spacing_upper.configure({"background": "red"})
def selected_algorithm(self, event):
'''
Action when selecting an algorithm in the option menu.
:return:
'''
start_x, start_y, dx, dy = 20, 100, 100, 40
if self._new_algorithm.get() == 'random' or self._new_algorithm.get() == 'random_no_delta':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
self._ent_random_trials.place(x=start_x + dx * 11.3, y=start_y + 1.2 * dy)
self.algorithm_random_label.place(x=start_x + dx * 11.3, y=start_y + 0.5 * dy)
elif self._new_algorithm.get() == 'anysmart' or self._new_algorithm.get() == 'anydetail':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
elif self._new_algorithm.get() == 'pso':
y_place_label = 11.2
y_place = 12.2
self._ent_random_trials.place_forget()
start_x = 150
self._lb_swarm_size.place(x=start_x + dx*11 , y=start_y - 1 * dy)
self._lb_omega.place(x=start_x + dx*11 , y=start_y - 0 * dy)
self._lb_phip.place(x=start_x + dx*11 , y=start_y + 1 * dy)
self._lb_phig.place(x=start_x + dx*11 , y=start_y + 2 * dy)
self._lb_maxiter.place(x=start_x + dx*14 , y=start_y - 1 * dy)
self._lb_minstep.place(x=start_x + dx*14, y=start_y + 0 * dy)
self._lb_minfunc.place(x=start_x + dx*14, y=start_y + 1 * dy)
self._ent_swarm_size.place(x=start_x + dx*12 , y=start_y - 1 * dy)
self._ent_omega.place(x=start_x + dx*12 , y=start_y - 0 * dy)
self._ent_phip.place(x=start_x + dx*12 , y=start_y + 1 * dy)
self._ent_phig.place(x=start_x + dx*12 , y=start_y + 2 * dy)
self._ent_maxiter.place(x=start_x + dx*15 , y=start_y - 1 * dy)
self._ent_minstep.place(x=start_x + dx*15, y=start_y + 0 * dy)
self._ent_minfunc.place(x=start_x + dx*15, y=start_y + 1 * dy)
def get_pressure_input(self, line):
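'''
Collect the lateral, fatigue and slamming pressures and the fatigue object for one line,
taken from the example data when run stand-alone, otherwise from the main application.
'''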
if __name__ == '__main__':
lateral_press = self._normal_pressure
fat_press = self._fatigue_pressure
slamming_pressure = self._slamming_pressure
fat_obj = self._fatigue_object
else:
lateral_press = self.app.get_highest_pressure(line)['normal'] / 1e6
fat_obj = self.app._line_to_struc[line][2]
if fat_obj is not None:
try:
fat_press = self.app.get_fatigue_pressures(line, fat_obj.get_accelerations())
except AttributeError:
fat_press = None
else:
fat_press = {'p_ext': {'loaded': 0, 'ballast': 0, 'part': 0},
'p_int': {'loaded': 0, 'ballast': 0, 'part': 0}}
try:
if self.app.get_highest_pressure(line)['slamming'] is None:
slamming_pressure = 0
else:
slamming_pressure = self.app.get_highest_pressure(line)['slamming']
except (KeyError, AttributeError):
slamming_pressure = 0
fat_press = ((fat_press['p_ext']['loaded'], fat_press['p_ext']['ballast'],
fat_press['p_ext']['part']),
(fat_press['p_int']['loaded'], fat_press['p_int']['ballast'],
fat_press['p_int']['part']))
return {'lateral pressure': lateral_press, 'fatigue pressure': fat_press,
'slamming pressure': slamming_pressure, 'fatigue object': fat_obj}
def run_optimizaion(self):
'''
Function called when pressing the optimization button inside this window.
:return:
'''
self.run_button.config(bg = 'white')
self._opt_results = {}
t_start = time.time()
self.progress_bar.config(maximum=len(self._active_lines))
self._opt_actual_running_time.config(text='')
constraints = (self._new_check_sec_mod.get(), self._new_check_min_pl_thk.get(),
self._new_check_shear_area.get(), self._new_check_buckling.get(),
self._new_check_fatigue.get(), self._new_check_slamming.get(),
self._new_check_local_buckling.get(), False, self._new_check_ml_buckling.get(), False)
self.pso_parameters = (self._new_swarm_size.get(),self._new_omega.get(),self._new_phip.get(),
self._new_phig.get(),self._new_maxiter.get(),self._new_minstep.get(),
self._new_minfunc.get())
max_min_span = None
self.progress_count.set(0)
counter = 0
found_files = self._filez
for line in self._active_lines:
init_obj = self._line_to_struc[line][0]
if __name__ == '__main__':
lateral_press = 200 #for testing
fat_obj = test.get_fatigue_object()
fat_press = test.get_fatigue_pressures()
slamming_pressure = test.get_slamming_pressure()
fat_press = ((fat_press['p_ext']['loaded'], fat_press['p_ext']['ballast'],
fat_press['p_ext']['part']),
(fat_press['p_int']['loaded'], fat_press['p_int']['ballast'],
fat_press['p_int']['part']))
else:
input_pressures = self.get_pressure_input(line)
lateral_press = input_pressures['lateral pressure']
fat_press = input_pressures['fatigue pressure']
slamming_pressure = input_pressures['slamming pressure']
fat_obj = input_pressures['fatigue object']
if self._toggle_btn.config('relief')[-1] == 'sunken':
found_files, predefined_stiffener_iter = self.toggle(found_files=found_files, obj = init_obj,
iterating=True)
else:
predefined_stiffener_iter = None
self._opt_results[line] = list(op.run_optmizataion(init_obj, self.get_lower_bounds(init_obj),
self.get_upper_bounds(init_obj),
lateral_press,self.get_deltas(),
algorithm=self._new_algorithm.get(),
trials=self._new_algorithm_random_trials.get(),
side=init_obj.Plate.get_side(),
const_chk=constraints,
pso_options=self.pso_parameters,
fatigue_obj=fat_obj,
fat_press_ext_int=fat_press,
slamming_press=slamming_pressure,
predefined_stiffener_iter = predefined_stiffener_iter,
processes=self._new_processes.get(),
min_max_span=max_min_span, use_weight_filter=False,
fdwn = self._new_fdwn.get(), fup = self._new_fup.get(),
ml_algo=self._ML_buckling))
self._harmonizer_data[line] = {}
counter += 1
self.progress_count.set(counter)
self.progress_bar.update_idletasks()
if self._opt_results[line] != None:
self._opt_actual_running_time.config(text='Accumulated running time: \n'
+ str(time.time() - t_start) + ' sec')
# print('Runned', line, 'OK')
# else:
# print('Runned', line, 'NOT OK - no results')
self.draw_select_canvas()
if self._new_harmonizer.get() == True:
self._canvas_opt.config(bg='yellow')
self._canvas_opt.create_text(200, 200, text='Harmonizing results',
font='Verdana 14 bold')
self.opt_harmonizer_historic()
counter += 1
self.progress_bar.stop()
self.run_button.config(bg='green')
self.draw_properties()
def opt_harmonizer_historic(self):
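'''
Harmonize the optimization results for all selected lines: collect every dimension set that
passed the checks for any line, re-check each candidate against all lines (optionally with
ML-CL buckling) and keep the lightest candidate that passes everywhere.
:return: True if a common solution was found, otherwise False.
'''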
# getting all acceptable solutions.
all_ok_checks = []
for line, data in self._opt_results.items():
for fail_ok in data[4]:
if fail_ok[0] == True:
# try:
# [round(val, 10) for val in fail_ok[2]]
# except TypeError:
# [print(val) for val in fail_ok[2]]
all_ok_checks.append(tuple([round(val,10) for val in fail_ok[2][0:6]]))
all_ok_checks = set(all_ok_checks)
# make iterator for multiprocessing
iterator = list()
to_check = (self._new_check_sec_mod.get(), self._new_check_min_pl_thk.get(),
self._new_check_shear_area.get(), self._new_check_buckling.get(),
self._new_check_fatigue.get(), self._new_check_slamming.get(),
self._new_check_local_buckling.get(), False, self._new_check_ml_buckling.get(), False)
iter_run_info = dict()
for slave_line in self._opt_results.keys():
input_pressures = self.get_pressure_input(slave_line)
iter_run_info[slave_line] = {'lateral pressure': input_pressures['lateral pressure'],
'fatigue pressure': input_pressures['fatigue pressure'],
'fatigue object':input_pressures['fatigue object'],
'slamming pressure':input_pressures['slamming pressure'],
'chk_calc_obj': self._opt_results[slave_line][0], 'ML-CL': [0,0],
'fup': self._new_fup.get(), 'fdwn': self._new_fdwn.get()}
iter_run_info['lines'] = list(self._opt_results.keys())
iter_run_info['checks'] = to_check
iter_run_info['keep spacing'] = self._keep_spacing.get()
for x_check in all_ok_checks:
iterator.append({'x': x_check, 'info': iter_run_info})
if to_check[8]:
# Do ML-CL checks
to_run = list()
for x_and_info in iterator:
for slave_line in x_and_info['info']['lines']:
iter_run_info = x_and_info['info']
lateral_press = iter_run_info[slave_line]['lateral pressure']
fat_press = iter_run_info[slave_line]['fatigue pressure']
fat_obj = iter_run_info[slave_line]['fatigue object']
slamming_pressure = iter_run_info[slave_line]['slamming pressure']
chk_calc_obj = iter_run_info[slave_line]['chk_calc_obj']
master_x = list(x_and_info['x'])
if iter_run_info['keep spacing']:
x = [chk_calc_obj.Plate.get_s()] + master_x[1:] + [chk_calc_obj.Plate.get_span(), chk_calc_obj.Plate.get_lg()]
else:
x = master_x + [chk_calc_obj.Plate.get_span(), chk_calc_obj.Plate.get_lg()]
fdwn = self._new_fdwn.get()
fup = self._new_fup.get()
calc_object = op.create_new_calc_obj(chk_calc_obj, x, fat_obj.get_fatigue_properties(), fdwn=fdwn,
fup=fup)
calc_object_stf = op.create_new_calc_obj(chk_calc_obj.Stiffener, x,
fat_obj.get_fatigue_properties(), fdwn=fdwn, fup=fup)
calc_object_pl = op.create_new_calc_obj(chk_calc_obj.Plate, x, fat_obj.get_fatigue_properties(),
fdwn=fdwn, fup=fup)
calc_object = [calc.AllStructure(Plate=calc_object_pl[0], Stiffener=calc_object_stf[0], Girder=None,
main_dict=chk_calc_obj.get_main_properties()['main dict']),
calc_object_pl[1]]
calc_object[0].lat_press = lateral_press
to_run.append((calc_object, x, lateral_press))
# ML-CL to be used.
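# Candidates are grouped into stiffened/unstiffened panels (SP/UP) and Int/GLGT boundaries so each group is scored with its matching classifier.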
sp_int, sp_gl_gt, up_int, up_gl_gt, \
sp_int_idx, sp_gl_gt_idx, up_int_idx, up_gl_gt_idx = \
list(), list(), list(), list(), list(), list(), list(), list()
# Create iterator
idx_count = 0
for calc_object, x, lat_press in to_run:
idx_count += 1
if calc_object[0].Plate.get_puls_sp_or_up() == 'UP':
if calc_object[0].Plate.get_puls_boundary() == 'Int':
up_int.append(calc_object[0].Stiffener.get_buckling_ml_input(lat_press, alone=False))
up_int_idx.append(idx_count-1)
else:
    up_gl_gt.append(calc_object[0].Stiffener.get_buckling_ml_input(lat_press, alone=False))
    up_gl_gt_idx.append(idx_count-1)
else:
if calc_object[0].Stiffener.get_puls_boundary() == 'Int':
sp_int.append(calc_object[0].Stiffener.get_buckling_ml_input(lat_press, alone=False))
sp_int_idx.append(idx_count-1)
else:
sp_gl_gt.append(calc_object[0].Stiffener.get_buckling_ml_input(lat_press, alone=False))
sp_gl_gt_idx.append(idx_count-1)
# Predict
sort_again = np.zeros([len(to_run), 2])
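# sort_again holds one [buckling, ultimate] prediction per entry in to_run, written back in the original order.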
if len(sp_int) != 0:
sp_int_res = [self._ML_buckling['cl SP buc int predictor'].predict(self._ML_buckling['cl SP buc int scaler']
.transform(sp_int)),
self._ML_buckling['cl SP ult int predictor'].predict(self._ML_buckling['cl SP buc int scaler']
.transform(sp_int))]
for idx, res_buc, res_ult in zip(sp_int_idx, sp_int_res[0], sp_int_res[1]):
sort_again[idx] = [res_buc, res_ult]
if len(sp_gl_gt) != 0:
sp_gl_gt_res = [self._ML_buckling['cl SP buc GLGT predictor'].predict(self._ML_buckling['cl SP buc GLGT scaler']
.transform(sp_gl_gt)),
self._ML_buckling['cl SP ult GLGT predictor'].predict(self._ML_buckling['cl SP buc GLGT scaler']
.transform(sp_gl_gt))]
for idx, res_buc, res_ult in zip(sp_gl_gt_idx, sp_gl_gt_res[0], sp_gl_gt_res[1]):
sort_again[idx] = [res_buc, res_ult]
if len(up_int) != 0:
up_int_res = [self._ML_buckling['cl UP buc int predictor'].predict(self._ML_buckling['cl UP buc int scaler']
.transform(up_int)),
self._ML_buckling['cl UP ult int predictor'].predict(self._ML_buckling['cl UP buc int scaler']
.transform(up_int))]
for idx, res_buc, res_ult in zip(up_int_idx, up_int_res[0], up_int_res[1]):
sort_again[idx] = [res_buc, res_ult]
if len(up_gl_gt) != 0:
up_gl_gt_res = [self._ML_buckling['cl UP buc GLGT predictor'].predict(self._ML_buckling['cl UP buc GLGT scaler']
.transform(up_gl_gt)),
self._ML_buckling['cl UP ult GLGT predictor'].predict(self._ML_buckling['cl UP buc GLGT scaler']
.transform(up_gl_gt))]
for idx, res_buc, res_ult in zip(up_gl_gt_idx, up_gl_gt_res[0], up_gl_gt_res[1]):
sort_again[idx] = [res_buc, res_ult]
for idx, x_and_info in enumerate(iterator):
for slave_line in x_and_info['info']['lines']:
iterator[idx]['info'][slave_line]['ML-CL'] = sort_again[idx]
# END ML-CL calc
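# Check every candidate against all lines in parallel; each worker returns the candidate x if all checks pass, otherwise None.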
processes = max(cpu_count() - 1, 1)
with Pool(processes) as my_process:
res_pre = my_process.map(helper_harmonizer_multi, iterator)
after_multirun_check_ok = list()
for res in res_pre:
if res is not None:
after_multirun_check_ok.append(res)
lowest_area, lowest_x = float('inf'), None
for ok_chkd in set(after_multirun_check_ok):
if sum(op.get_field_tot_area(ok_chkd )) < lowest_area:
lowest_area = sum(op.get_field_tot_area(ok_chkd))
lowest_x = ok_chkd
if lowest_area != float('inf'):
for line in self._opt_results.keys():
if self._keep_spacing.get():
this_x = [self._line_to_struc[line][0].Plate.get_s()] + list(lowest_x)[1:] + \
[self._line_to_struc[line][0].Plate.get_span(), self._line_to_struc[line][0].Plate.get_lg()]
else:
this_x = list(lowest_x) + [self._line_to_struc[line][0].Plate.get_span(),
self._line_to_struc[line][0].Plate.get_lg()]
calc_object_stf = op.create_new_calc_obj(self._line_to_struc[line][0].Plate, this_x,
fat_obj.get_fatigue_properties(), fdwn=fdwn, fup=fup)
calc_object_pl = op.create_new_calc_obj(self._line_to_struc[line][0].Stiffener, this_x,
fat_obj.get_fatigue_properties(), fdwn=fdwn, fup=fup)
self._opt_results[line][0] = [calc.AllStructure(Plate=calc_object_pl[0], Stiffener=calc_object_stf[0], Girder=None,
main_dict=chk_calc_obj.get_main_properties()['main dict']),
calc_object_pl[1]]
if self._line_to_struc[line][2] != None:
self._opt_results[line][2] = opt.create_new_calc_obj(init_obj= self._line_to_struc[line][1],
x = this_x,
fat_dict=self._line_to_struc[line]
[2].get_fatigue_properties())[1]
else:
self._line_to_struc[line][2] = None
return True
else:
for line in self._opt_results.keys():
self._opt_results[line][0] = None
return False
def opt_harmonizer(self):
'''
Harmonizes the results of your run.
:return:
'''
# Find highest section modulus.
harm_res= {}
chk = (self._new_check_sec_mod.get(), self._new_check_min_pl_thk.get(),
self._new_check_shear_area.get(), self._new_check_buckling.get(),
self._new_check_fatigue.get(), self._new_check_slamming.get(),
self._new_check_local_buckling.get())
for master_line in self._opt_results.keys():
master_obj = self._opt_results[master_line][0]
master_x = [master_obj.Plate.get_s(), master_obj.Plate.get_pl_thk(), master_obj.Stiffener.get_web_h(),
master_obj.Stiffener.get_web_thk(), master_obj.Stiffener.get_fl_w(),
master_obj.Stiffener.get_fl_thk(),
master_obj.Plate.get_span(),master_obj.Plate.get_lg()]
harm_res[master_line] = []
for slave_line in self._opt_results.keys():
input_pressures = self.get_pressure_input(slave_line)
lateral_press = input_pressures['lateral pressure']
fat_press = input_pressures['fatigue pressure']
fat_obj = input_pressures['fatigue object']
slamming_pressure = input_pressures['slamming pressure']
chk_calc_obj = self._opt_results[slave_line][1]
chk_result = list(op.run_optmizataion(chk_calc_obj,
master_x[0:6]+[chk_calc_obj.Plate.get_span(),
chk_calc_obj.Plate.get_lg()],
master_x[0:6]+[chk_calc_obj.Plate.get_span(),
chk_calc_obj.Plate.get_lg()],
lateral_press,self.get_deltas(),
algorithm=self._new_algorithm.get(),
trials=self._new_algorithm_random_trials.get(),
side=chk_calc_obj.Plate.get_side(), const_chk=chk,
pso_options=self.pso_parameters,fatigue_obj=fat_obj,
fat_press_ext_int=fat_press, slamming_press=slamming_pressure,
predefined_stiffener_iter = None,
processes=self._new_processes.get(), min_max_span=None,
use_weight_filter=True,
fdwn = self._new_fdwn.get(), fup = self._new_fup.get()))[0:4]
print('Master:', master_line, 'Slave', slave_line, 'Check', chk_result[-1])
harm_res[master_line].append(chk_result)
harmonized_area, harmonized_line =float('inf'), None
for master_line, all_slave_res in harm_res.items():
if all([slave_line_res[-1] for slave_line_res in all_slave_res]):
master_obj = self._opt_results[master_line][0]
master_area = sum(op.get_field_tot_area([master_obj.Plate.get_s(), master_obj.Plate.get_pl_thk(),
master_obj.Stiffener.get_web_h(), master_obj.Stiffener.get_web_thk(),
master_obj.Stiffener.get_fl_w(), master_obj.Stiffener.get_fl_thk(),
master_obj.Plate.get_span(),master_obj.Plate.get_lg()]))
if master_area < harmonized_area:
harmonized_area = master_area
harmonized_line = master_line
if harmonized_area != 0 and harmonized_line is not None:
harmonized_x_pl = self._opt_results[harmonized_line][0].Plate.get_tuple()
harmonized_x_stf = self._opt_results[harmonized_line][0].Stiffener.get_tuple()
harmonized_x = harmonized_x_pl[0:2] + harmonized_x_stf[2:]
for line in self._opt_results.keys():
self._opt_results[line][0] = opt.create_new_structure_obj(self._line_to_struc[line][0], harmonized_x)
calc_object_stf = op.create_new_calc_obj(self._line_to_struc[line][0].Plate, harmonized_x)
calc_object_pl = op.create_new_calc_obj(self._line_to_struc[line][0].Stiffener,harmonized_x)
self._opt_results[line][0] = [calc.AllStructure(Plate=calc_object_pl[0], Stiffener=calc_object_stf[0],
Girder=None, main_dict=chk_calc_obj.get_main_properties()['main dict']),
calc_object_pl[1]]
if self._line_to_struc[line][2] != None:
self._opt_results[line][2] = opt.create_new_calc_obj(init_obj= self._line_to_struc[line][1],
x = harmonized_x,
fat_dict=self._line_to_struc[line]
[2].get_fatigue_properties())[1]
else:
self._line_to_struc[line][2] = None
return True
else:
for line in self._opt_results.keys():
self._opt_results[line][0] = None
self._opt_results[line][1] = None
return False
def get_running_time(self):
'''
Estimate the running time of the algorithm.
:return:
'''
if self._new_algorithm.get() in ['anysmart', 'anydetail']:
try:
number_of_combinations = \
max((self._new_spacing_upper.get() - self._new_spacing_lower.get()) / self._new_delta_spacing.get(),
1) * \
max((self._new_pl_thk_upper.get() - self._new_pl_thk_lower.get()) / self._new_delta_pl_thk.get(), 1) * \
max((self._new_web_h_upper.get() - self._new_web_h_lower.get()) / self._new_delta_web_h.get(), 1) * \
max((self._new_web_thk_upper.get() - self._new_web_thk_lower.get()) / self._new_delta_web_thk.get(),
1) * \
max((self._new_fl_w_upper.get() - self._new_fl_w_lower.get()) / self._new_delta_fl_w.get(), 1) * \
max((self._new_fl_thk_upper.get() - self._new_fl_thk_lower.get()) / self._new_delta_fl_thk.get(), 1)
return int(number_of_combinations * self.running_time_per_item)*len(self._active_lines)
except TclError:
return 0
else:
try:
return int(self._new_algorithm_random_trials.get() * self.running_time_per_item)*len(self._active_lines)
except TclError:
return 0
def get_deltas(self):
'''
Return a numpy array of the deltas.
:return:
'''
return np.array([float(self._ent_delta_spacing.get()) / 1000, float(self._new_delta_pl_thk.get()) / 1000,
float(self._new_delta_web_h.get()) / 1000, float(self._new_delta_web_thk.get()) / 1000,
float(self._new_delta_fl_w.get()) / 1000, float(self._new_delta_fl_thk.get()) / 1000])
def update_running_time(self, *args):
'''
Estimate the running time of the algorithm.
:return:
'''
try:
self._runnig_time_label.config(text=str(self.get_running_time()))
except ZeroDivisionError:
pass # _tkinter.TclError: pass
if self._new_check_ml_buckling.get() == True:
self._new_check_buckling.set(False)
self._new_check_local_buckling.set(False)
def get_upper_bounds(self,obj):
'''
Return a numpy array of upper bounds.
:return:
'''
if self._keep_spacing.get():
    spacing = obj.Plate.get_s()
else:
    spacing = self._new_spacing_upper.get() / 1000
return np.array([spacing, self._new_pl_thk_upper.get() / 1000,
self._new_web_h_upper.get() / 1000, self._new_web_thk_upper.get() / 1000,
self._new_fl_w_upper.get() / 1000, self._new_fl_thk_upper.get() / 1000,
obj.Plate.get_span(), obj.Plate.get_lg()])
def get_lower_bounds(self,obj):
'''
Return a numpy array of lower bounds.
:return:
'''
if self._keep_spacing.get():
spacing = obj.Plate.get_s()
else:
spacing = self._new_spacing_lower.get() / 1000
return np.array([spacing, self._new_pl_thk_lower.get() / 1000,
self._new_web_h_lower.get() / 1000, self._new_web_thk_lower.get() / 1000,
self._new_fl_w_lower.get() / 1000, self._new_fl_thk_lower.get() / 1000,
obj.Plate.get_span(), obj.Plate.get_lg()])
def checkered(self, line_distance):
'''
Creates a grid in the properties canvas.
:param line_distance:
:return:
'''
# vertical lines at an interval of "line_distance" pixel
for x in range(line_distance, self._prop_canvas_dim[0], line_distance):
self._canvas_opt.create_line(x, 0, x, self._prop_canvas_dim[0], fill="grey", stipple='gray50')
# horizontal lines at an interval of "line_distance" pixel
for y in range(line_distance, self._prop_canvas_dim[1], line_distance):
self._canvas_opt.create_line(0, y, self._prop_canvas_dim[0], y, fill="grey", stipple='gray50')
def draw_properties(self,init_obj = None, opt_obj=None,line=None):
'''
Drawing properties in the canvas.
:return:
'''
ctr_x = self._prop_canvas_dim[0] / 2
ctr_y = self._prop_canvas_dim[1] / 2 + 200
opt_color, opt_stippe = 'red', 'gray12'
m = self._draw_scale
self._canvas_opt.delete('all')
if init_obj != None:
self.checkered(10)
init_color, init_stipple = 'blue', 'gray12'
self._canvas_opt.create_rectangle(0, 0, self._prop_canvas_dim[0] + 10, 80, fill='white')
self._canvas_opt.create_line(10, 10, 30, 10, fill=init_color, width=5)
self._canvas_opt.create_text(270, 10, text='Initial - Pl.: ' + str(init_obj.Plate.get_s() * 1000) + 'x' + str(
init_obj.Plate.get_pl_thk() * 1000) +
' Stf.: ' + str(init_obj.Stiffener.get_web_h() * 1000) + 'x' + str(
init_obj.Stiffener.get_web_thk() * 1000) + '+' +
str(init_obj.Stiffener.get_fl_w() * 1000) + 'x' +
str(init_obj.Stiffener.get_fl_thk() * 1000),
font='Verdana 8',
fill=init_color)
self._canvas_opt.create_text(120, 30, text='Weight (per Lg width): ' +
str(int(op.calc_weight([init_obj.Plate.get_s(),
init_obj.Plate.get_pl_thk(),
init_obj.Stiffener.get_web_h(),
init_obj.Stiffener.get_web_thk(),
init_obj.Stiffener.get_fl_w(),
init_obj.Stiffener.get_fl_thk(),
init_obj.Stiffener.get_span(),
init_obj.Stiffener.get_lg()]))),
font='Verdana 8', fill=init_color)
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.Plate.get_s() / 2, ctr_y, ctr_x + m * init_obj.Plate.get_s() / 2,
ctr_y - m * init_obj.Plate.get_pl_thk(), fill=init_color, stipple=init_stipple)
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.Stiffener.get_web_thk() / 2, ctr_y - m * init_obj.Stiffener.get_pl_thk(),
ctr_x + m * init_obj.Stiffener.get_web_thk() / 2, ctr_y - m * (init_obj.Stiffener.get_web_h() + init_obj.Stiffener.get_pl_thk())
, fill=init_color, stipple=init_stipple)
if init_obj.Stiffener.get_stiffener_type() not in ['L', 'L-bulb']:
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.Stiffener.get_fl_w() / 2, ctr_y - m * (init_obj.Plate.get_pl_thk() + init_obj.Stiffener.get_web_h()),
ctr_x + m * init_obj.Stiffener.get_fl_w() / 2,
ctr_y - m * (init_obj.Plate.get_pl_thk() + init_obj.Stiffener.get_web_h() + init_obj.Stiffener.get_fl_thk()),
fill=init_color, stipple=init_stipple)
else:
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.Stiffener.get_web_thk() / 2,
ctr_y - m * (init_obj.Plate.get_pl_thk() + init_obj.Stiffener.get_web_h()),
ctr_x + m * init_obj.Stiffener.get_fl_w(),
ctr_y - m * (init_obj.Plate.get_pl_thk() + init_obj.Stiffener.get_web_h() + init_obj.Stiffener.get_fl_thk()),
fill=init_color, stipple=init_stipple)
if opt_obj != None:
# [0.6, 0.012, 0.25, 0.01, 0.1, 0.01]
self._canvas_opt.config(bg = 'palegreen')
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.Plate.get_s() / 2, ctr_y,
ctr_x + m * opt_obj.Plate.get_s() / 2,
ctr_y - m * opt_obj.Plate.get_pl_thk(), fill=opt_color,
stipple=opt_stippe)
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.Stiffener.get_web_thk() / 2, ctr_y -
m * opt_obj.Plate.get_pl_thk(),
ctr_x + m * opt_obj.Stiffener.get_web_thk() / 2,
ctr_y - m * (
opt_obj.Stiffener.get_web_h() + opt_obj.Plate.get_pl_thk())
, fill=opt_color, stipple=opt_stippe)
if init_obj.Stiffener.get_stiffener_type() not in ['L', 'L-bulb']:
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.Stiffener.get_fl_w() / 2, ctr_y
- m * (
opt_obj.Plate.get_pl_thk() + opt_obj.Stiffener.get_web_h()),
ctr_x + m * opt_obj.Stiffener.get_fl_w() / 2, ctr_y -
m * (
opt_obj.Plate.get_pl_thk() + opt_obj.Stiffener.get_web_h() +
opt_obj.Stiffener.get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
else:
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.Stiffener.get_web_thk() / 2, ctr_y
- m * (
opt_obj.Plate.get_pl_thk() + opt_obj.Stiffener.get_web_h()),
ctr_x + m * opt_obj.Stiffener.get_fl_w(), ctr_y -
m * (
opt_obj.Plate.get_pl_thk() + opt_obj.Stiffener.get_web_h() +
opt_obj.Stiffener.get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
self._canvas_opt.create_line(10, 50, 30, 50, fill=opt_color, width=5)
self._canvas_opt.create_text(270, 50,
text='Optimized - Pl.: ' + str(round(opt_obj.Plate.get_s() * 1000,1)) + 'x' +
str(round(opt_obj.Plate.get_pl_thk() * 1000,1)) + ' Stf.: '
+ str(round(opt_obj.Stiffener.get_web_h() * 1000,1)) +
'x' + str(round(opt_obj.Stiffener.get_web_thk() * 1000,1)) + '+' +
str(round(opt_obj.Stiffener.get_fl_w() * 1000,1)) +
'x' + str(round(opt_obj.Stiffener.get_fl_thk() * 1000,1)),
font='Verdana 8', fill=opt_color)
self._canvas_opt.create_text(120, 70, text='Weight (per Lg width): '
+ str(int(op.calc_weight([opt_obj.Plate.get_s(),
opt_obj.Plate.get_pl_thk(),
opt_obj.Stiffener.get_web_h(),
opt_obj.Stiffener.get_web_thk(),
opt_obj.Stiffener.get_fl_w(),
opt_obj.Stiffener.get_fl_thk(),
opt_obj.Plate.get_span(),
opt_obj.Plate.get_lg()]))),
font='Verdana 8', fill=opt_color)
elif self._opt_results != {}:
self._canvas_opt.config(bg='green')
self._canvas_opt.create_text(200, 200, text='Optimization results available.\n\n'
'Middle click orange lines to\n view results.',
font = 'Verdana 14 bold')
else:
self._canvas_opt.config(bg='mistyrose')
self._canvas_opt.create_text(200, 60, text='No optimization results found.', font = 'Verdana 14 bold')
if line != None:
if __name__ == '__main__':
lateral_press = 200 # for testing
else:
lateral_press = self.app.get_highest_pressure(line)['normal'] / 1000
self._canvas_opt.create_text(250, self._prop_canvas_dim[1]-10,
text= line + ' lateral pressure: '+str(lateral_press)+' kPa',
font='Verdana 10 bold',fill='red')
def draw_select_canvas(self, load_selected=False):
'''
Making the lines canvas.
:return:
'''
self._canvas_select.delete('all')
        # grid for the canvas
self._canvas_select.create_line(self._canvas_draw_origo[0], 0, self._canvas_draw_origo[0], self._select_canvas_dim[1],
stipple='gray50')
self._canvas_select.create_line(0, self._canvas_draw_origo[1], self._select_canvas_dim[0], self._canvas_draw_origo[1],
stipple='gray50')
self._canvas_select.create_text(self._canvas_draw_origo[0] - 30 ,
self._canvas_draw_origo[1] + 20 , text='(0,0)',
font='Text 10')
self._canvas_select.create_text([800 ,60],
text='Mouse left click: select lines to loads\n'
'Mouse mid click: show properties for one line\n'
'Mouse right click: clear all selection\n'
'Shift key press: add selected line\n'
'Control key press: remove selected line\n\n'
'NOTE! Select lines you want to return before\n'
'pressing return button.', font='Verdana 8 bold',
fill='red')
# drawing the line dictionary.
if len(self._line_dict) != 0:
for line, value in self._line_dict.items():
color = 'black'
coord1 = self.get_point_canvas_coord('point' + str(value[0]))
coord2 = self.get_point_canvas_coord('point' + str(value[1]))
vector = [coord2[0] - coord1[0], coord2[1] - coord1[1]]
# drawing a bold line if it is selected
if line in self._active_lines:
width = 6
if line in self._opt_results.keys():
color, width = 'orange', 8
self._canvas_select.create_line(coord1, coord2, width=width, fill=color)
self._canvas_select.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 + 10,
                                                text='Line ' + str(get_num(line)), font='Verdana 10 bold',
fill='red')
else:
if line in self._opt_results.keys():
color = 'orange'
self._canvas_select.create_line(coord1, coord2, width=3, fill=color)
self._canvas_select.create_text(coord1[0] - 20 + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 + 10,
text='line' + str(get_num(line)), font="Text 8", fill='black')
def algorithm_info(self):
''' When button is clicked, info is displayed.'''
messagebox.showinfo(title='Algorithm information',
                            message='The algorithms currently included are:\n'
'ANYSMART: \n'
' Calculates all alternatives using upper and lower bounds.\n'
' The step used inside the bounds is defined in deltas.\n\n'
'RANDOM: \n'
' Uses the same bounds and deltas as in ANYSMART.\n'
' Number of combinations calculated is defined in "trials",\n'
                                    ' which selects within the bounds and deltas defined.\n\n'
'RANDOM_NO_BOUNDS:\n'
' Same as RANDOM, but does not use the defined deltas.\n'
                                    ' The deltas are set to 1 mm for all dimensions/thicknesses.\n\n'
'ANYDETAIL:\n'
' Same as for ANYSMART, but will take some more time and\n'
' provide a chart of weight development during execution.\n\n'
'PSO - Particle Swarm Search:\n'
' The information can be found on \n'
' http://pythonhosted.org/pyswarm/ \n'
' For further information google it!\n'
' Parameters:\n'
' swarmsize : The number of particles in the swarm (Default: 100)\n'
' omega : Particle velocity scaling factor (Default: 0.5)\n'
' phip : Scaling factor to search away from the particle’s \n'
' best known position (Default: 0.5)\n'
' phig : Scaling factor to search away from the swarm’s best \n'
' known position (Default: 0.5)\n'
' maxiter : The maximum number of iterations for the swarm \n'
' to search (Default: 100)\n'
' minstep : The minimum stepsize of swarm’s best position \n'
' before the search terminates (Default: 1e-8)\n'
' minfunc : The minimum change of swarm’s best objective value\n'
' before the search terminates (Default: 1e-8)\n\n'
'\n'
                                    'All algorithms calculate local scantling and buckling requirements')
def slider_used(self, event):
'''
Action when slider is activated.
:return:
'''
self._canvas_scale = self.slider.get()
self.draw_canvas()
def on_closing(self):
'''
Action when closing the window without saving.
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
mess = tk.messagebox.showwarning('Closed without saving', 'Closing will not save loads you have created',
type = 'okcancel')
if mess == 'ok':
self._frame.grab_release()
self._frame.destroy()
self.app.on_aborted_load_window()
def get_point_canvas_coord(self, point_no):
'''
Returning the canvas coordinates of the point. This value will change with slider.
:param point_no:
:return:
'''
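        # The canvas y-axis grows downward, so the model y-coordinate is subtracted from the
        # drawing origin while x is added; both are scaled by the current canvas scale (slider/scroll).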
point_coord_x = self._canvas_draw_origo[0] + self._point_dict[point_no][0]* self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - self._point_dict[point_no][1]* self._canvas_scale
return [point_coord_x, point_coord_y]
def controls(self):
'''
Specifying the controls to be used.
:return:
'''
self._canvas_select.bind('<Button-1>', self.left_click)
self._canvas_select.bind('<Button-2>', self.mid_click)
self._canvas_select.bind('<Button-3>', self.right_click)
self._frame.bind('<Shift_L>', self.shift_pressed)
self._frame.bind('<Shift_R>', self.shift_pressed)
self._frame.bind('<Control_L>', self.ctrl_pressed)
self._frame.bind('<Control_R>', self.ctrl_pressed)
self._frame.bind("<MouseWheel>", self.mouse_scroll)
self._frame.bind("<B2-Motion>", self.button_2_click_and_drag)
def shift_pressed(self,event=None):
'''
Event is executed when shift key pressed.
:return:
'''
self._add_to_lines = True
def ctrl_pressed(self,event=None):
'''
Event when control is pressed.
:param event:
:return:
'''
self._add_to_lines = False
def left_click(self, event):
        '''
        Called when the left mouse button is clicked on the selection canvas.
        Bound in controls().
        '''
self._previous_drag_mouse = [event.x, event.y]
click_x = self._canvas_select.winfo_pointerx() - self._canvas_select.winfo_rootx()
click_y = self._canvas_select.winfo_pointery() - self._canvas_select.winfo_rooty()
stop = False
if len(self._line_dict) > 0:
for key, value in self._line_dict.items():
coord1x = self.get_point_canvas_coord('point' + str(value[0]))[0]
coord2x = self.get_point_canvas_coord('point' + str(value[1]))[0]
coord1y = self.get_point_canvas_coord('point' + str(value[0]))[1]
coord2y = self.get_point_canvas_coord('point' + str(value[1]))[1]
vector = [coord2x - coord1x, coord2y - coord1y]
click_x_range = [ix for ix in range(click_x - 10, click_x + 10)]
click_y_range = [iy for iy in range(click_y - 10, click_y + 10)]
distance = int(dist([coord1x, coord1y], [coord2x, coord2y]))
                # walk along the line in 1-pixel steps and register a hit if any sampled point
                # falls within +/- 10 px of the click position
for dist_mult in range(1, distance - 1):
dist_mult = dist_mult / distance
x_check = int(coord1x) + int(round(vector[0] * dist_mult, 0))
y_check = int(coord1y) + int(round(vector[1] * dist_mult, 0))
if x_check in click_x_range and y_check in click_y_range:
self.line_is_active = True
if self._add_to_lines:
if key not in self._active_lines:
self._active_lines.append(key)
elif self._add_to_lines== False:
if key in self._active_lines:
self._active_lines.remove(key)
self._canvas_select.delete('all')
break
self.draw_select_canvas()
self.update_running_time()
def right_click(self,event):
'''
Event when right click.
        :param event:
:return:
'''
self._previous_drag_mouse = [event.x, event.y]
self._active_lines = []
self._canvas_select.delete('all')
self.draw_select_canvas()
self.update_running_time()
def mid_click(self,event):
'''
        Event when middle click.
        :param event:
:return:
'''
self._previous_drag_mouse = [event.x, event.y]
if self._opt_results == {}:
return
click_x = self._canvas_select.winfo_pointerx() - self._canvas_select.winfo_rootx()
click_y = self._canvas_select.winfo_pointery() - self._canvas_select.winfo_rooty()
if len(self._line_dict) > 0:
for key, value in self._line_dict.items():
coord1x = self.get_point_canvas_coord('point' + str(value[0]))[0]
coord2x = self.get_point_canvas_coord('point' + str(value[1]))[0]
coord1y = self.get_point_canvas_coord('point' + str(value[0]))[1]
coord2y = self.get_point_canvas_coord('point' + str(value[1]))[1]
vector = [coord2x - coord1x, coord2y - coord1y]
click_x_range = [ix for ix in range(click_x - 10, click_x + 10)]
click_y_range = [iy for iy in range(click_y - 10, click_y + 10)]
distance = int(dist([coord1x, coord1y], [coord2x, coord2y]))
                # walk along the line in 1-pixel steps and register a hit if any sampled point
                # falls within +/- 10 px of the click position
for dist_mult in range(1, distance - 1):
dist_mult = dist_mult / distance
x_check = int(coord1x) + int(round(vector[0] * dist_mult, 0))
y_check = int(coord1y) + int(round(vector[1] * dist_mult, 0))
if x_check in click_x_range and y_check in click_y_range:
self._canvas_select.delete('all')
self._active_lines = []
self._active_lines.append(key)
if key in self._opt_results.keys() and self._opt_results[key]!=None:
self.draw_properties(init_obj=self._line_to_struc[key][0],opt_obj=self._opt_results[key][0],
line=key)
self._mid_click_line = key
else:
self.draw_properties(init_obj=self._line_to_struc[key][0],line=key)
self._mid_click_line = None
break
self.draw_select_canvas()
self.draw_select_canvas()
self.update_running_time()
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
if self._opt_results == {}:
messagebox.showinfo(title='Nothing to return', message='No results to return.')
return
else:
to_return = {}
for line in self._active_lines:
if self._opt_results[line][0] is not None:
to_return[line] = self._opt_results[line]
else:
messagebox.showinfo(title='None in results, cannot return', message='None in results, c'
'annot return values.')
return
self.app.on_close_opt_multiple_window(to_return)
messagebox.showinfo(title='Return info', message='Returning: '+str(list(to_return.keys())) +
'\nLines without results are not returned.')
self._frame.destroy()
def toggle(self, found_files = None, obj = None, iterating = False):
'''
On off button.
:param found_files:
:param obj:
:return:
'''
        # ensure predefined_structure is always defined before it is returned below
        predefined_structure = None
        if iterating and found_files is not None:
            predefined_structure = hlp.helper_read_section_file(files=found_files, obj=obj.Plate)
if self._toggle_btn.config('relief')[-1] == 'sunken':
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg = 'salmon')
self._ent_spacing_upper.config(bg = 'white')
self._ent_spacing_lower.config(bg = 'white')
self._ent_delta_spacing.config(bg = 'white')
else:
self._toggle_btn.config(relief="sunken")
self._toggle_btn.config(bg='lightgreen')
self._ent_spacing_upper.config(bg = 'lightgreen')
self._ent_spacing_lower.config(bg = 'lightgreen')
self._ent_delta_spacing.config(bg = 'lightgreen')
openfile = list(askopenfilenames(parent=self._frame, title='Choose files to open',
initialdir=self._root_dir))
if openfile == []:
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg='salmon')
self._ent_spacing_upper.config(bg='white')
self._ent_spacing_lower.config(bg='white')
self._ent_delta_spacing.config(bg='white')
else:
self._filez = openfile
return found_files, predefined_structure
def toggle_harmonizer(self):
pass
def plot_results(self):
if self._mid_click_line is not None:
if len(self._opt_results[self._mid_click_line]) != 0:
op.plot_optimization_results(self._opt_results[self._mid_click_line])
def mouse_scroll(self,event):
self._canvas_scale += event.delta/50
self._canvas_scale = 0 if self._canvas_scale < 0 else self._canvas_scale
self.draw_select_canvas()
def button_2_click_and_drag(self,event):
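        # Middle-button drag pans the canvas: the drawing origin is shifted by the mouse movement
        # since the previous drag event before the selection canvas is redrawn.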
self._canvas_draw_origo = (self._canvas_draw_origo[0]-(self._previous_drag_mouse[0]-event.x),
self._canvas_draw_origo[1]-(self._previous_drag_mouse[1]-event.y))
self._previous_drag_mouse = (event.x,event.y)
self.draw_select_canvas()
def open_example_file(self):
import os
if os.path.isfile('sections.csv'):
os.startfile('sections.csv')
else:
os.startfile(self._root_dir + '/' + 'sections.csv')
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateOptimizeMultipleWindow(master=root)
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/optimize_multiple_window.py | optimize_multiple_window.py |
import tkinter as tk
import numpy as np
import time, os, datetime
from tkinter import messagebox
from tkinter.filedialog import askopenfilenames
from multiprocessing import cpu_count
try:
    import any_files.main_application as main_application
    import any_files.optimize as op
    import any_files.example_data as test
    # helper provides helper_read_section_file(), used by the predefined-stiffener toggle below
    import any_files.helper as hlp
except ModuleNotFoundError:
    import ANYstructure.any_files.main_application as main_application
    import ANYstructure.any_files.optimize as op
    import ANYstructure.any_files.example_data as test
    import ANYstructure.any_files.helper as hlp
class CreateOptimizeCylinderWindow():
'''
This class initiates the single optimization window.
'''
def __init__(self,master,app=None):
super(CreateOptimizeCylinderWindow,self).__init__()
if __name__ == '__main__':
import pickle
import any_files.calc_structure as calc
self._initial_structure_obj = test.get_structure_calc_object(heavy=True)
self._initial_calc_obj = test.get_structure_calc_object(heavy=True)
self._fatigue_object = test.get_fatigue_object()
self._fatigue_pressure = test.get_fatigue_pressures()
self._slamming_pressure = test.get_slamming_pressure()
image_dir = os.path.dirname(__file__)+'\\images\\'
self._PULS_object = None
self._puls_acceptance = 0.87
self._initial_cylinder_obj = calc.CylinderAndCurvedPlate(main_dict=test.shell_main_dict,
shell=calc.Shell(test.shell_dict),
long_stf=calc.Structure(test.obj_dict_cyl_long2),
ring_stf=None,#calc.Structure(test.obj_dict_cyl_ring2),
ring_frame=None)#calc.Structure(test.obj_dict_cyl_heavy_ring2))
self._ML_buckling = dict() # Buckling machine learning algorithm
for name, file_base in zip(['cl SP buc int predictor', 'cl SP buc int scaler',
'cl SP ult int predictor', 'cl SP ult int scaler',
'cl SP buc GLGT predictor', 'cl SP buc GLGT scaler',
'cl SP ult GLGT predictor', 'cl SP ult GLGT scaler',
'cl UP buc int predictor', 'cl UP buc int scaler',
'cl UP ult int predictor', 'cl UP ult int scaler',
'cl UP buc GLGT predictor', 'cl UP buc GLGT scaler',
'cl UP ult GLGT predictor', 'cl UP ult GLGT scaler'
],
["ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_CSR-Tank_req_cl_predictor",
"ml_files\\CL_CSR-Tank_req_cl_UP_scaler",
"ml_files\\CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_predictor",
"ml_files\\CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_SP_scaler"]):
self._ML_buckling[name] = None
if os.path.isfile(file_base + '.pickle'):
file = open(file_base + '.pickle', 'rb')
self._ML_buckling[name] = pickle.load(file)
file.close()
self._ML_classes = {0: 'N/A',
1: 'A negative utilisation factor is found.',
2: 'At least one of the in-plane loads must be non-zero.',
3: 'Division by zero',
4: 'Overflow',
5: 'The aspect ratio exceeds the PULS code limit',
6: 'The global slenderness exceeds 4. Please reduce stiffener span or increase stiffener height.',
7: 'The applied pressure is too high for this plate field.', 8: 'web-flange-ratio',
9: 'UF below or equal 0.87', 10: 'UF between 0.87 and 1.0', 11: 'UF above 1.0'}
else:
self.app = app
self._initial_structure_obj = app._line_to_struc[app._active_line][0]
self._initial_calc_obj = app._line_to_struc[app._active_line][1]
self._initial_cylinder_obj = app._line_to_struc[app._active_line][5]
self._fatigue_object = app._line_to_struc[app._active_line][2]
try:
self._fatigue_pressure = app.get_fatigue_pressures(app._active_line,
self._fatigue_object.get_accelerations())
except AttributeError:
self._fatigue_pressure = None
try:
self._lateral_pressure = 0
except KeyError:
self._lateral_pressure = 0
try:
if self.app.get_highest_pressure(self.app._active_line)['slamming'] is None:
self._slamming_pressure = 0
else:
self._slamming_pressure = self.app.get_highest_pressure(self.app._active_line)['slamming']
except KeyError:
self._slamming_pressure = 0
image_dir = app._root_dir +'\\images\\'
self._root_dir = app._root_dir
self._PULS_object = app._PULS_results
self._puls_acceptance = self.app._new_puls_uf.get()
self._ML_buckling = app._ML_buckling
self._frame = master
self._frame.wm_title("Optimize structure")
self._frame.geometry('1600x900')
self._frame.grab_set()
'''
shell_upper_bounds = np.array( [0.03, 3, 5, 5, 10, None, None, None])
shell_deltas = np.array( [0.005, 0.5, 1, 0.1,1, None, None, None])
shell_lower_bounds = np.array( [0.02, 2.5, 5, 5, 10, None, None, None])
long_upper_bounds = np.array( [0.8, None, 0.5, 0.02, 0.2, 0.03, None, None])
long_deltas = np.array( [0.1, None, 0.1, 0.01, 0.1, 0.01, None, None])
long_lower_bounds = np.array( [0.7, None, 0.3, 0.01, 0.1, 0.01, None, None])
ring_stf_upper_bounds = np.array( [None, None, 0.5, 0.018, 0.2, 0.03, None, None])
ring_stf_deltas = np.array( [None, None, 0.1, 0.004, 0.1, 0.01, None, None])
ring_stf_lower_bounds = np.array( [None, None, 0.3, 0.010, 0.1, 0.010, None, None])
ring_frame_upper_bounds = np.array( [None, None, 0.9, 0.04, 0.3, 0.04, None, None])
ring_frame_deltas = np.array( [None, None, 0.2, 0.01, 0.1, 0.01, None, None])
ring_frame_lower_bounds = np.array( [None, None, 0.7, 0.02, 0.2, 0.02, None, None])
'''
ent_w = 12
default_shell_upper_bounds = np.array([0.03, 3, 5, 5, 10, None, None, None])
default_shell_deltas = np.array([0.005, 0.5, 1, 0.1, 1, None, None, None])
default_shell_lower_bounds = np.array([0.02, 2.5, 5, 5, 10, None, None, None])
default_long_upper_bounds = np.array([0.8, None, 0.5, 0.02, 0.2, 0.03, None, None])
default_long_deltas = np.array([0.1, None, 0.1, 0.01, 0.1, 0.01, None, None])
default_long_lower_bounds = np.array([0.7, None, 0.3, 0.01, 0.1, 0.01, None, None])
default_ring_stf_upper_bounds = np.array([None, None, 0.5, 0.018, 0.2, 0.03, None, None])
default_ring_stf_deltas = np.array([None, None, 0.1, 0.004, 0.1, 0.01, None, None])
default_ring_stf_lower_bounds = np.array([None, None, 0.3, 0.010, 0.1, 0.010, None, None])
default_ring_frame_upper_bounds = np.array([None, None, 0.9, 0.04, 0.3, 0.04, None, None])
default_ring_frame_deltas = np.array([None, None, 0.2, 0.01, 0.1, 0.01, None, None])
default_ring_frame_lower_bounds = np.array([None, None, 0.7, 0.02, 0.2, 0.02, None, None])
self._default_data = [[default_shell_upper_bounds,default_shell_deltas, default_shell_lower_bounds],
[default_long_upper_bounds, default_long_deltas, default_long_lower_bounds],
[default_ring_stf_upper_bounds, default_ring_stf_deltas, default_ring_stf_lower_bounds],
[default_ring_frame_upper_bounds, default_ring_frame_deltas,
default_ring_frame_lower_bounds]]
shell_example = [0.03, 3, 5, 5, 10, None, None, None]
long_example = ring_stf_example = ring_frame_example = [0.8, None, 0.5, 0.02, 0.2, 0.03, None, None]
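        # Each geometry vector has eight slots. Judging by the entry labels defined further below,
        # the shell slots are [thickness, radius, l rings, L shell, L total, unused, unused, unused]
        # and the stiffener slots are [spacing, unused, web height, web thk, flange width,
        # flange thk, unused, unused]; None marks slots that are not optimized.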
shell_upper_bounds = [tk.DoubleVar() for dummy in shell_example]
shell_deltas = [tk.DoubleVar() for dummy in shell_example]
shell_lower_bounds = [tk.DoubleVar() for dummy in shell_example]
long_upper_bounds = [tk.DoubleVar() for dummy in long_example]
long_deltas = [tk.DoubleVar() for dummy in long_example]
long_lower_bounds = [tk.DoubleVar() for dummy in long_example]
ring_stf_upper_bounds = [tk.DoubleVar() for dummy in ring_stf_example]
ring_stf_deltas = [tk.DoubleVar() for dummy in ring_stf_example]
ring_stf_lower_bounds = [tk.DoubleVar() for dummy in ring_stf_example]
ring_frame_upper_bounds = [tk.DoubleVar() for dummy in ring_frame_example]
ring_frame_deltas = [tk.DoubleVar() for dummy in ring_frame_example]
ring_frame_lower_bounds = [tk.DoubleVar() for dummy in ring_frame_example]
self._new_geo_data = list()
self._new_geo_data = [[shell_upper_bounds,shell_deltas, shell_lower_bounds],
[long_upper_bounds, long_deltas, long_lower_bounds],
[ring_stf_upper_bounds, ring_stf_deltas, ring_stf_lower_bounds],
[ring_frame_upper_bounds, ring_frame_deltas, ring_frame_lower_bounds]]
self._new_entries = list()
map_type = {'shell':0, 'long':1, 'ring stf': 2, 'ring heavy':3}
map_type_idx = {0: True, 1: self._initial_cylinder_obj.LongStfObj is not None,
2: self._initial_cylinder_obj.RingStfObj is not None,
3: self._initial_cylinder_obj.RingFrameObj is not None}
for idx_1, geo_i in enumerate(self._new_geo_data):
all_geos = list()
if map_type_idx[idx_1] == False:
continue
for idx_2, entries in enumerate(geo_i):
these_ents = list()
for idx_3, ent_i in enumerate(entries):
self._new_geo_data[idx_1][idx_2][idx_3].trace('w', self.update_running_time)
these_ents.append(tk.Entry(self._frame,
textvariable = self._new_geo_data[idx_1][idx_2][idx_3], width = ent_w))
self._new_geo_data[idx_1][idx_2][idx_3].set(0 if self._default_data[idx_1][idx_2][idx_3] is None
else self._default_data[idx_1][idx_2][idx_3]*1000)
all_geos.append(these_ents)
self._new_entries.append(all_geos)
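        # Note on units: the default bounds/deltas above are stored in metres and multiplied by 1000
        # for display in the mm entry fields; get_upper_bounds(), get_lower_bounds() and get_deltas()
        # divide by 1000 again before the values are passed to the optimizer.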
self._predefined_stiffener_iter = None
self._opt_runned = False
self._opt_results = ()
self._opt_actual_running_time = tk.Label(self._frame,text='',font='Verdana 12 bold')
self._draw_scale = 600
self._canvas_dim = (550, 500)
self._canvas_opt = tk.Canvas(self._frame,width=self._canvas_dim[0], height=self._canvas_dim[1],
background='azure',relief = 'groove', borderwidth=2)
# tk.Frame(self._frame,width=770,height=5, bg="grey", colormap="new").place(x=20,y=127)
# tk.Frame(self._frame, width=770, height=5, bg="grey", colormap="new").place(x=20, y=167)
self._canvas_opt.place(x=1000,y=350)
algorithms = ('anysmart cylinder','random','random_no_delta')
tk.Label(self._frame,text='-- Cylinder optimizer --',font='Verdana 15 bold').place(x=10,y=10)
# upper and lower bounds for optimization
#[0.6, 0.012, 0.3, 0.01, 0.1, 0.01]
self._new_algorithm = tk.StringVar()
self._new_algorithm_random_trials = tk.IntVar()
self._new_swarm_size = tk.IntVar()
self._new_omega = tk.DoubleVar()
self._new_phip = tk.DoubleVar()
self._new_phig = tk.DoubleVar()
self._new_maxiter = tk.IntVar()
self._new_minstep = tk.DoubleVar()
self._new_minfunc = tk.DoubleVar()
self._new_slamming_pressure = tk.DoubleVar()
self._new_fatigue_int_press = tk.DoubleVar()
self._new_fatigue_ext_press = tk.DoubleVar()
#additional choices for the random and pso algorithm
self._ent_algorithm = tk.OptionMenu(self._frame,self._new_algorithm,command=self.selected_algorithm,*algorithms)
self._ent_random_trials = tk.Entry(self._frame,textvariable=self._new_algorithm_random_trials)
pso_width = 10
self._ent_swarm_size = tk.Entry(self._frame,textvariable=self._new_swarm_size, width = pso_width)
self._ent_omega = tk.Entry(self._frame,textvariable=self._new_omega, width = pso_width)
self._ent_phip = tk.Entry(self._frame,textvariable=self._new_phip, width = pso_width)
self._ent_phig = tk.Entry(self._frame,textvariable=self._new_phig, width = pso_width)
self._ent_maxiter = tk.Entry(self._frame,textvariable=self._new_maxiter, width = pso_width)
self._ent_minstep = tk.Entry(self._frame,textvariable=self._new_minstep, width = pso_width)
self._ent_minfunc = tk.Entry(self._frame,textvariable=self._new_minfunc, width = pso_width)
# stresses in plate and stiffener
self._new_sasd = tk.DoubleVar()
self._new_smsd = tk.DoubleVar()
self._new_tTsd = tk.DoubleVar()
self._new_tQsd = tk.DoubleVar()
self._new_design_pressure = tk.DoubleVar()
self._new_shsd = tk.DoubleVar()
self._new_sasd.set(self._initial_cylinder_obj.sasd)
self._new_smsd.set(self._initial_cylinder_obj.smsd)
self._new_tTsd.set(self._initial_cylinder_obj.tTsd)
self._new_tQsd.set(self._initial_cylinder_obj.tQsd)
self._new_design_pressure.set(self._initial_cylinder_obj.psd)
self._new_shsd.set(self._initial_cylinder_obj.shsd)
self._ent_sasd = tk.Entry(self._frame, textvariable=self._new_sasd, width=ent_w)
self._ent_smsd = tk.Entry(self._frame, textvariable=self._new_smsd, width=ent_w)
self._ent_tTsd = tk.Entry(self._frame, textvariable=self._new_tTsd, width=ent_w)
self._ent_tQsd = tk.Entry(self._frame, textvariable=self._new_tQsd, width=ent_w)
self._ent_design_pressure = tk.Entry(self._frame, textvariable=self._new_design_pressure, width=ent_w)
self._ent_shsd = tk.Entry(self._frame, textvariable=self._new_shsd, width=ent_w)
start_x,start_y,dx,dy = 20,100,100,40
self._new_processes = tk.IntVar()
self._new_processes.set(max(cpu_count() - 1, 1))
tk.Label(self._frame, text='Processes\n (CPUs)', font='Verdana 9 bold', bg = 'silver')\
.place(x=start_x + 10 * dx, y=start_y - 1.1 * dy)
tk.Entry(self._frame, textvariable=self._new_processes, width = 12, bg = 'silver')\
.place(x=start_x + 10 * dx, y=start_y - 0.3 * dy)
self._runnig_time_label = tk.Label(self._frame, text='',font='Verdana 12 bold', fg = 'red')
self._runnig_time_label.place(x=start_x+4.3*dx, y=start_y + 2.8 * dy)
#tk.Label(self._frame, text='seconds ',font='Verdana 9 bold').place(x=start_x+6*dx, y=start_y + 2.8 * dy)
self._result_label = tk.Label(self._frame, text = '',font = 'Verdana 9 bold' )
self._result_label.place(x=start_x, y=start_y + 4.2 * dy)
'''
self._new_geo_data = [[shell_upper_bounds,shell_deltas, shell_lower_bounds],
[long_upper_bounds, long_deltas, long_lower_bounds],
[ring_stf_upper_bounds, ring_stf_deltas, ring_stf_lower_bounds],
[ring_frame_upper_bounds, ring_frame_deltas, ring_frame_lower_bounds]]
'''
shell = ['Shell thk. [mm]', 'Shell radius [mm]', 'l rings [mm]', 'L shell [mm]', 'L tot. [mm]', 'N/A - future',
'N/A - future', 'N/A - future']
stf_long = ['Spacing [mm]', 'N/A', 'Web height [mm]', 'Web thk. [mm]', 'Flange width [mm]',
'Flange thk. [mm]', 'N/A - future', 'N/A - future']
stf_ring = ['N/A', 'N/A', 'Web height [mm]', 'Web thk. [mm]', 'Flange width [mm]',
'Flange thk. [mm]', 'N/A - future', 'N/A - future']
all_label = [shell, stf_long, stf_ring, stf_ring]
text_i = ['Upper bounds [mm]', 'Iteration delta [mm]','Lower bounds [mm]']
kind = ['Shell or panel', 'Longitudinal stiffener', 'Ring stiffener', 'Ring frame/girder']
for idx_1, member in enumerate(self._new_entries):
if map_type_idx[idx_1] == False:
continue
for idx_2, bounds in enumerate(member):
tk.Label(self._frame, text=text_i[idx_2], font='Verdana 9').place(x=start_x,
y=start_y + dy * idx_1 * 4 + dy * idx_2)
if idx_2 == 0:
tk.Label(self._frame, text=kind[idx_1], font='Verdana 10 bold') \
.place(x=start_x, y=start_y + dy * idx_1 * 4 + dy * idx_2 - dy * 0.5)
for idx_3, entry_i in enumerate(bounds):
if idx_2 == 0:
tk.Label(self._frame, text=all_label[idx_1][idx_3], font='Verdana 7 bold')\
.place(x = start_x+dx*2 + idx_3*dx, y = start_y+dy*idx_1*4 + dy*idx_2 -dy*0.5)
entry_i.place(x = start_x+dx*2 + idx_3*dx, y = start_y+dy*idx_1*4 + dy*idx_2)
if 'N/A' in all_label[idx_1][idx_3]:
entry_i.configure(bg = 'grey')
###
#Labels for the pso
self._lb_swarm_size = tk.Label(self._frame,text='swarm size')
self._lb_omega = tk.Label(self._frame,text='omega')
self._lb_phip = tk.Label(self._frame,text='phip')
self._lb_phig = tk.Label(self._frame,text='phig')
self._lb_maxiter = tk.Label(self._frame,text='maxiter')
self._lb_minstep = tk.Label(self._frame,text='minstep')
self._lb_minfunc = tk.Label(self._frame,text='minfunc')
###
dys = 0.9*dy
tk.Label(self._frame, text='Design axial stress, sa,sd', font='Verdana 9')\
.place(x=start_x+10*dx,y=start_y+1*dys)
tk.Label(self._frame, text='Pa', font='Verdana 9')\
.place(x=start_x+dx*14,y=start_y+1*dys)
tk.Label(self._frame, text='Design bending stress, sm,sd', font='Verdana 9')\
.place(x=start_x+10*dx,y=start_y+2*dys)
tk.Label(self._frame, text='Pa', font='Verdana 9')\
.place(x=start_x+dx*14,y=start_y+2*dys)
tk.Label(self._frame, text='Design torsional stress, tT,sd', font='Verdana 9')\
.place(x=start_x+10*dx,y=start_y+3*dys)
tk.Label(self._frame, text='Pa', font='Verdana 9')\
.place(x=start_x+dx*14,y=start_y+3*dys)
tk.Label(self._frame, text='Design shear stress, tQ,sd', font='Verdana 9')\
.place(x=start_x+10*dx,y=start_y+4*dys)
tk.Label(self._frame, text='Pa', font='Verdana 9')\
.place(x=start_x+dx*14,y=start_y+4*dys)
tk.Label(self._frame, text='Design lateral pressure, psd', font='Verdana 9 bold')\
.place(x=start_x+10*dx,y=start_y+5*dys)
tk.Label(self._frame, text='Pa', font='Verdana 9')\
.place(x=start_x+dx*14,y=start_y+5*dys)
        tk.Label(self._frame, text='Additional hoop stress, sh,sd', font='Verdana 9 bold')\
.place(x=start_x+10*dx,y=start_y+6*dys)
tk.Label(self._frame, text='Pa', font='Verdana 9')\
.place(x=start_x+dx*14,y=start_y+6*dys)
self._ent_sasd.place(x=start_x+dx*13,y=start_y+1*dys)
self._ent_smsd.place(x=start_x+dx*13,y=start_y+2*dys)
self._ent_tTsd.place(x=start_x+dx*13,y=start_y+3*dys)
self._ent_tQsd.place(x=start_x + dx * 13, y=start_y + 4 * dys)
self._ent_design_pressure.place(x=start_x + dx * 13, y=start_y + 5 * dys)
self._ent_shsd.place(x=start_x + dx * 13, y=start_y +6 * dys)
if self._fatigue_pressure is not None:
tk.Label(self._frame, text='Fatigue pressure: internal= '+str(self._fatigue_pressure['p_int'])+ ' external= '
+str(self._fatigue_pressure['p_ext']), font='Verdana 7') \
.place(x=start_x + dx * 5, y=start_y + 19.3 * dy)
else:
tk.Label(self._frame, text='Fatigue pressure: internal= '+str(0)+ ' external= '
+str(0), font='Verdana 7') \
.place(x=start_x + dx * 5, y=start_y + 19.3 * dy)
#setting default values
init_dim = float(10) #mm
init_thk = float(1) #mm
self._new_slamming_pressure.set(self._slamming_pressure)
if self._fatigue_pressure is None:
self._new_fatigue_ext_press.set(0), self._new_fatigue_int_press.set(0)
else:
self._new_fatigue_int_press.set(self._fatigue_pressure['p_int']), \
self._new_fatigue_ext_press.set(self._fatigue_pressure['p_ext'])
self._new_algorithm.set('anysmart cylinder')
self._new_algorithm_random_trials.set(100000)
self._new_swarm_size.set(100)
self._new_omega.set(0.5)
self._new_phip.set(0.5)
self._new_phig.set(0.5)
self._new_maxiter.set(100)
self._new_minstep.set(1e-8)
self._new_minfunc.set(1e-8)
self._new_algorithm_random_trials.trace('w',self.update_running_time)
self._new_algorithm.trace('w',self.update_running_time)
# self.running_time_per_item = {'PULS':0.2489626556016598, 'RP': 1.009943181818182e-5}
# self.initial_weight = op.calc_weight([self._spacing,self._pl_thk,self._stf_web_h,self._stf_web_thk,
# self._fl_w,self._fl_thk,self._new_span.get(),self._new_width_lg.get()])
# img_file_name = 'img_plate_and_stiffener.gif'
# if os.path.isfile('images/' + img_file_name):
# file_path = 'images/' + img_file_name
# else:
# file_path = self._root_dir + '/images/' + img_file_name
# photo = tk.PhotoImage(file=file_path)
# label = tk.Label(self._frame,image=photo)
# label.image = photo # keep a reference!
# label.place(x=550, y=300)
# tk.Label(self._frame,text='Select algorithm', font = 'Verdana 8 bold').place(x=start_x+dx*11, y=start_y+7*dy)
# self._ent_algorithm.place(x=start_x+dx*11, y=start_y+dy*8)
self.algorithm_random_label = tk.Label(self._frame, text='Number of trials')
# tk.Button(self._frame,text='algorith information',command=self.algorithm_info,bg='white')\
# .place(x=start_x+dx*12.5, y=start_y+dy*7)
self.run_button = tk.Button(self._frame,text='RUN OPTIMIZATION!', command=self.run_optimizaion, bg='red',
font='Verdana 10 bold',fg='Yellow', relief="raised")
self.run_button.place(x=start_x+dx*11.5, y=start_y-dy, relwidth = 0.15)
# self.run_results = tk.Button(self._frame,text='show calculated', command=self.plot_results, bg='white',
# font='Verdana 10',fg='black')
# self.run_results.place(x=start_x+dx*8, y=start_y+dy*1.5)
self._opt_actual_running_time.place(x=start_x+dx*11, y=start_y)
self.close_and_save =tk.Button(self._frame,text='Return and replace initial structure with optimized',
command=self.save_and_close,bg='green',font='Verdana 10',fg='yellow')
self.close_and_save.place(x=start_x+dx*5,y=10)
tk.Button(self._frame, text='Open predefined stiffeners example',
command=self.open_example_file, bg='white', font='Verdana 10')\
.place(x=start_x+dx*10,y=10)
# Stress scaling
self._new_fup = tk.DoubleVar()
self._new_fup.set(0.5)
self._new_fdwn = tk.DoubleVar()
self._new_fdwn.set(1)
tk.Label(self._frame, text='Factor when scaling stresses up, fup')\
.place(x=start_x, y=start_y + 16 * dy)
ent_fup = tk.Entry(self._frame, textvariable=self._new_fup, width = 10)
ent_fup.place(x=start_x + dx * 3, y=start_y + 16 * dy)
        tk.Label(self._frame, text='Factor when scaling stresses down, fdown')\
.place(x=start_x, y=start_y + 17 * dy)
ent_fdwn = tk.Entry(self._frame, textvariable=self._new_fdwn, width = 10)
ent_fdwn.place(x=start_x + dx * 3, y=start_y + 17 * dy)
# tk.Button(self._frame,text='Iterate predefiened stiffeners',command=self.open_multiple_files ,bg='yellow')\
# .place(x=start_x, y=start_y - dy * 2)
# command=lambda id="default": self.set_colors(id)
        self._toggle_btn = tk.Button(self._frame, text="Iterate predefined stiffeners", relief="raised",
command=self.toggle, bg = 'salmon')
self._toggle_btn.place(x= + 3*dx, y=start_y - dy * 2)
self._toggle_object, self._filez = self._initial_structure_obj, None
self.draw_properties()
self.update_running_time()
main_application.Application.draw_cylinder(text_size='Verdana 8 bold',
canvas = self._canvas_opt,
CylObj=self._initial_cylinder_obj,
start_x_cyl=350, start_y_cyl=300, text_x=230,
text_y=120)
def selected_algorithm(self,event):
'''
Action when selecting an algorithm.
:return:
'''
start_x, start_y, dx, dy = 20, 100, 100, 40
if self._new_algorithm.get()=='random' or self._new_algorithm.get()=='random_no_delta':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
self._ent_random_trials.place(x=start_x+dx*11.3, y=start_y+1.2*dy)
self.algorithm_random_label.place(x=start_x+dx*11.3, y=start_y+0.5*dy)
elif self._new_algorithm.get()=='anysmart' or self._new_algorithm.get()=='anydetail':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
elif self._new_algorithm.get()=='pso':
y_place_label =11.2
y_place = 12.2
self._ent_random_trials.place_forget()
self._lb_swarm_size.place(x=start_x+dx*y_place_label, y=start_y-2*dy)
self._lb_omega.place(x=start_x+dx*y_place_label, y=start_y-1*dy)
self._lb_phip.place(x=start_x+dx*y_place_label, y=start_y-0*dy)
self._lb_phig.place(x=start_x+dx*y_place_label, y=start_y+1*dy)
self._lb_maxiter.place(x=start_x+dx*y_place_label, y=start_y+2*dy)
self._lb_minstep.place(x=start_x+dx*y_place_label, y=start_y+3*dy)
self._lb_minfunc.place(x=start_x+dx*y_place_label, y=start_y+4*dy)
self._ent_swarm_size.place(x=start_x+dx*y_place, y=start_y-2*dy)
self._ent_omega.place(x=start_x+dx*y_place, y=start_y-1*dy)
self._ent_phip.place(x=start_x+dx*y_place, y=start_y+0*dy)
self._ent_phig.place(x=start_x+dx*y_place, y=start_y+1*dy)
self._ent_maxiter.place(x=start_x+dx*y_place, y=start_y+2*dy)
self._ent_minstep.place(x=start_x+dx*y_place, y=start_y+3*dy)
self._ent_minfunc.place(x=start_x+dx*y_place, y=start_y+4*dy)
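        # Note: the option menu in this window only offers 'anysmart cylinder', 'random' and
        # 'random_no_delta'; the 'anysmart'/'anydetail' and 'pso' branches above appear to be carried
        # over from the flat-panel optimizer windows and cannot be selected from this menu.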
def modify_structure_object(self):
        ''' Changing parameters in the structure object before running. '''
pass
def run_optimizaion(self):
'''
        Run the optimization when the RUN OPTIMIZATION button is pressed.
:return:
'''
self.run_button.config(bg = 'white')
self.run_button.config(fg='red')
self.run_button.config(text='RUNNING OPTIMIZATION')
self.run_button.config(relief="sunken")
self._opt_actual_running_time.config(text='Run started ' + datetime.datetime.now().strftime("%H:%M:%S"))
self._opt_actual_running_time.update()
t_start = time.time()
self._opt_results, self._opt_runned = (), False
self.pso_parameters = (self._new_swarm_size.get(),self._new_omega.get(),self._new_phip.get(),
self._new_phig.get(),
self._new_maxiter.get(),self._new_minstep.get(),self._new_minfunc.get())
if self._fatigue_pressure is not None:
fat_press = ((self._fatigue_pressure['p_ext']['loaded'],self._fatigue_pressure['p_ext']['ballast'],
self._fatigue_pressure['p_ext']['part']),
(self._fatigue_pressure['p_int']['loaded'],self._fatigue_pressure['p_int']['ballast'],
self._fatigue_pressure['p_int']['part']))
else:
fat_press = None
self._new_sasd.set(self._new_sasd.get())
self._new_smsd.set(self._new_smsd.get())
self._new_tTsd.set(self._new_tTsd.get())
self._new_tQsd.set(self._new_tQsd.get())
self._new_design_pressure.set(self._new_design_pressure.get())
self._initial_cylinder_obj.psd = self._new_design_pressure.get()
self._new_shsd.set(self._new_shsd.get())
self._opt_results= op.run_optmizataion(initial_structure_obj= self._initial_cylinder_obj,
min_var= self.get_lower_bounds(),
max_var=self.get_upper_bounds(),lateral_pressure=
self._new_design_pressure.get(),
deltas= self.get_deltas(),algorithm=self._new_algorithm.get(),
trials=self._new_algorithm_random_trials.get(),
fatigue_obj=self._fatigue_object,
fat_press_ext_int=fat_press,
slamming_press = self._new_slamming_pressure.get(),
predefined_stiffener_iter=self._predefined_stiffener_iter,
processes=self._new_processes.get(),
use_weight_filter = True,
                                               fdwn = self._new_fdwn.get(), fup = self._new_fup.get(),
cylinder = True)
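        # op.run_optmizataion is expected to return a tuple where element 0 is the optimized
        # CylinderAndCurvedPlate object; when a valid result is found it is drawn and its stresses
        # are written back to the entry fields below.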
if self._opt_results is not None and self._opt_results[0] is not None:
self._opt_actual_running_time.config(text='Actual running time: \n'
+str(round((time.time()-t_start)/60,4))+' min')
self._opt_actual_running_time.update()
self._opt_runned = True
#self._result_label.config(text=self._opt_results[0].__str__)
self._canvas_opt.delete('all')
main_application.Application.draw_cylinder(text_size='Verdana 8 bold',
canvas = self._canvas_opt,
CylObj=self._opt_results[0],
start_x_cyl=350, start_y_cyl=300, text_x=230,
text_y=120)
self._new_sasd.set(self._opt_results[0].sasd)
self._new_smsd.set(self._opt_results[0].smsd)
self._new_tTsd.set(self._opt_results[0].tTsd)
self._new_tQsd.set(self._opt_results[0].tQsd)
self._new_design_pressure.set(self._opt_results[0].psd)
self._new_shsd.set(self._opt_results[0].shsd)
#self.draw_properties()
else:
messagebox.showinfo(title='Nothing found', message='No better alternatives found. Modify input.\n'
'There may be no alternative that is acceptable.\n')
self.run_button.config(bg='green')
self.run_button.config(fg='yellow')
self.run_button.config(text='RUN OPTIMIZATION')
self.run_button.config(relief="raised")
def get_running_time(self):
'''
Estimate the running time of the algorithm.
:return:
'''
pass
def get_deltas(self):
'''
        Return a nested list of the deltas, converted from mm (entry values) to m.
:return:
'''
all_deltas = list()
for idx_1, geo_i in enumerate(self._new_geo_data):
these_deltas = list()
for idx_3, val in enumerate(geo_i[1]):
these_deltas.append(val.get()/1000)
all_deltas.append(these_deltas)
return all_deltas
def update_running_time(self,*args):
'''
Estimate the running time of the algorithm.
:return:
'''
pass
def get_upper_bounds(self):
'''
        Return a nested list of upper bounds, converted from mm (entry values) to m.
:return:
'''
all_upper = list()
for idx_1, geo_i in enumerate(self._new_geo_data):
these_upper = list()
for idx_3, val in enumerate(geo_i[0]):
these_upper.append(val.get()/1000)
all_upper.append(these_upper)
return all_upper
def get_lower_bounds(self):
'''
        Return a nested list of lower bounds, converted from mm (entry values) to m.
:return:
'''
all_lower = list()
for idx_1, geo_i in enumerate(self._new_geo_data):
these_lower = list()
for idx_3, val in enumerate(geo_i[2]):
these_lower.append(val.get()/1000)
all_lower.append(these_lower)
return all_lower
def get_sigmas(self):
'''
        Returns the stresses as a numpy array.
:return:
'''
return np.array([self._new_sasd.get(),self._new_smsd.get(),
self._new_tTsd.get(),self._new_tQsd.get(),
self._new_design_pressure.get(),self._new_shsd.get()])
def checkered(self,line_distance):
# vertical lines at an interval of "line_distance" pixel
for x in range(line_distance, self._canvas_dim[0], line_distance):
self._canvas_opt.create_line(x, 0, x, self._canvas_dim[0], fill="grey",stipple='gray50')
# horizontal lines at an interval of "line_distance" pixel
for y in range(line_distance, self._canvas_dim[1], line_distance):
self._canvas_opt.create_line(0, y, self._canvas_dim[0], y, fill="grey",stipple='gray50')
def draw_properties(self):
'''
Drawing properties in the canvas.
:return:
'''
self._canvas_opt.delete('all')
#self.checkered(10)
ctr_x = self._canvas_dim[0]/2
ctr_y = self._canvas_dim[1]/2+200
m = self._draw_scale
init_color,init_stipple = 'blue','gray12'
opt_color,opt_stippe = 'red','gray12'
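        # m = self._draw_scale converts metres to canvas pixels; the rectangles below draw the plate,
        # stiffener web and flange of the optimized cross-section about (ctr_x, ctr_y).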
# self._canvas_opt.create_rectangle(0,0,self._canvas_dim[0]+10,80,fill='white')
# self._canvas_opt.create_line(10,10,30,10,fill = init_color,width=5)
if self._opt_runned:
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].get_s() / 2, ctr_y,
ctr_x + m * self._opt_results[0].get_s() / 2,
ctr_y - m * self._opt_results[0].get_pl_thk(), fill=opt_color,
stipple=opt_stippe)
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].get_web_thk() / 2, ctr_y -
m * self._opt_results[0].get_pl_thk(),
ctr_x + m * self._opt_results[0].get_web_thk() / 2,
ctr_y - m * (self._opt_results[0].get_web_h() + self._opt_results[0].get_pl_thk())
, fill=opt_color, stipple=opt_stippe)
if self._opt_results[0].get_stiffener_type() not in ['L', 'L-bulb']:
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].get_fl_w() / 2, ctr_y
- m * (self._opt_results[0].get_pl_thk()+ self._opt_results[0].get_web_h()),
ctr_x + m * self._opt_results[0].get_fl_w() / 2,ctr_y -
m * (self._opt_results[0].get_pl_thk() + self._opt_results[0].get_web_h() +
self._opt_results[0].get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
else:
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].get_web_thk() / 2, ctr_y
- m * (self._opt_results[0].get_pl_thk()+ self._opt_results[0].get_web_h()),
ctr_x + m * self._opt_results[0].get_fl_w() ,ctr_y -
m * (self._opt_results[0].get_pl_thk() + self._opt_results[0].get_web_h() +
self._opt_results[0].get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
self._canvas_opt.create_line(10, 50, 30, 50, fill=opt_color, width=5)
self._canvas_opt.create_text(270,50,text='Optimized - Pl.: '+str(round(self._opt_results[0].get_s()*1000,1))
+'x'+ str(round(self._opt_results[0].get_pl_thk()*1000,1))+
' Stf.: '+str(round(self._opt_results[0].get_web_h()*1000,1))+
'x'+str(round(self._opt_results[0].get_web_thk()*1000,1))+'+'+
str(round(self._opt_results[0].get_fl_w()*1000,1))+
'x'+str(round(self._opt_results[0].get_fl_thk()*1000,1)),
font = 'Verdana 8',fill = opt_color)
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
try:
self.app.on_close_opt_cyl_window(self._opt_results)
except (IndexError, TypeError):
messagebox.showinfo(title='Nothing to return',message='No results to return.')
return
self._frame.destroy()
def algorithm_info(self):
''' When button is clicked, info is displayed.'''
        messagebox.showinfo(title='Algorithm information',
                            message='The algorithms currently included are:\n'
'ANYSMART: \n'
' Calculates all alternatives using upper and lower bounds.\n'
' The step used inside the bounds is defined in deltas.\n'
                                    ' This algorithm uses MULTIPROCESSING and will be faster.\n\n'
'RANDOM: \n'
' Uses the same bounds and deltas as in ANYSMART.\n'
' Number of combinations calculated is defined in "trials",\n'
                                    ' which selects within the bounds and deltas defined.\n\n'
'RANDOM_NO_BOUNDS:\n'
' Same as RANDOM, but does not use the defined deltas.\n'
                                    ' The deltas are set to 1 mm for all dimensions/thicknesses.\n\n'
'ANYDETAIL:\n'
' Same as for ANYSMART, but will take some more time and\n'
' provide a chart of weight development during execution.\n\n'
'PSO - Particle Swarm Search:\n'
' The information can be found on \n'
' http://pythonhosted.org/pyswarm/ \n'
' For further information google it!\n'
' Parameters:\n'
' swarmsize : The number of particles in the swarm (Default: 100)\n'
' omega : Particle velocity scaling factor (Default: 0.5)\n'
' phip : Scaling factor to search away from the particle’s \n'
' best known position (Default: 0.5)\n'
' phig : Scaling factor to search away from the swarm’s best \n'
' known position (Default: 0.5)\n'
' maxiter : The maximum number of iterations for the swarm \n'
' to search (Default: 100)\n'
' minstep : The minimum stepsize of swarm’s best position \n'
' before the search terminates (Default: 1e-8)\n'
' minfunc : The minimum change of swarm’s best objective value\n'
' before the search terminates (Default: 1e-8)\n\n'
'\n'
                                    'All algorithms calculate local scantling and buckling requirements')
def toggle(self):
if self._toggle_btn.config('relief')[-1] == 'sunken':
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg = 'salmon')
predefined_stiffener_iter = []
else:
self._toggle_btn.config(relief="sunken")
            self._toggle_btn.config(bg='lightgreen')
predefined_stiffener_iter = []
open_files = askopenfilenames(parent=self._frame, title='Choose files to open', initialdir=self._root_dir)
if self._initial_cylinder_obj.LongStfObj is not None:
predefined_stiffener_iter = hlp.helper_read_section_file(files=list(open_files),
obj=self._initial_cylinder_obj.LongStfObj)
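                # helper_read_section_file is expected to return a list of stiffener definitions built
                # from the chosen section files; an empty list resets the toggle button below and
                # disables the predefined-stiffener iteration.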
if predefined_stiffener_iter == []:
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg = 'salmon')
self._predefined_stiffener_iter = None
else:
self._predefined_stiffener_iter = predefined_stiffener_iter
self.update_running_time()
def open_example_file(self):
import os
if os.path.isfile('sections.csv'):
os.startfile('sections.csv')
else:
os.startfile(self._root_dir + '/' + 'sections.csv')
def show_calculated(self):
''' '''
pass
def plot_results(self):
if len(self._opt_results) != 0:
op.plot_optimization_results(self._opt_results)
def write_result_csv(self):
if len(self._opt_results) != 0:
print(self._opt_results)
def receive_progress_info():
'''
Get progress info from optimization algorithm.
:return:
'''
print('hi')
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateOptimizeCylinderWindow(master=root)
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/optimize_cylinder.py | optimize_cylinder.py |
from scipy.special import gammaln
from scipy.stats import gamma as gammadist
import numpy as np
from scipy.integrate import simps
import os, time, datetime, json, random, math
from scipy.optimize import minimize
try:
import any_files.helper as hlp
import any_files.SN_curve_parameters as snc
except ModuleNotFoundError:
import ANYstructure.any_files.helper as hlp
import ANYstructure.any_files.SN_curve_parameters as snc
class Structure():
'''
Setting the properties for the plate and the stiffener. Takes a dictionary as argument.
'''
def __init__(self, main_dict, *args, **kwargs):
super(Structure,self).__init__()
self._main_dict = main_dict
if 'panel or shell' not in main_dict.keys():
self._panel_or_shell = 'panel'
else:
self._panel_or_shell = main_dict['panel or shell'][0]
self._plate_th = main_dict['plate_thk'][0]
self._web_height = main_dict['stf_web_height'][0]
self._web_th = main_dict['stf_web_thk'][0]
self._flange_width = main_dict['stf_flange_width'][0]
self._flange_th = main_dict['stf_flange_thk'][0]
self._mat_yield = main_dict['mat_yield'][0]
self._mat_factor = main_dict['mat_factor'][0]
self._span = main_dict['span'][0]
self._spacing = main_dict['spacing'][0]
self._structure_type = main_dict['structure_type'][0]
self._sigma_y1=main_dict['sigma_y1'][0]
self._sigma_y2=main_dict['sigma_y2'][0]
self._sigma_x1 = main_dict['sigma_x1'][0]
self._sigma_x2 = main_dict['sigma_x2'][0]
self._tauxy=main_dict['tau_xy'][0]
self._plate_kpp = main_dict['plate_kpp'][0]
self._stf_kps = main_dict['stf_kps'][0]
self._km1 = main_dict['stf_km1'][0]
self._km2 = main_dict['stf_km2'][0]
self._km3 = main_dict['stf_km3'][0]
self._stiffener_type=main_dict['stf_type'][0]
self._structure_types = main_dict['structure_types'][0]
self._dynamic_variable_orientation = None
if self._structure_type in self._structure_types['vertical']:
self._dynamic_variable_orientation = 'z - vertical'
elif self._structure_type in self._structure_types['horizontal']:
self._dynamic_variable_orientation = 'x - horizontal'
self._puls_method = main_dict['puls buckling method'][0]
self._puls_boundary = main_dict['puls boundary'][0]
self._puls_stf_end = main_dict['puls stiffener end'][0]
self._puls_sp_or_up = main_dict['puls sp or up'][0]
self._puls_up_boundary = main_dict['puls up boundary'][0]
self._zstar_optimization = main_dict['zstar_optimization'][0]
try:
self._girder_lg=main_dict['girder_lg'][0]
except KeyError:
self._girder_lg = 10
try:
self._pressure_side = main_dict['press_side'][0]
except KeyError:
self._pressure_side = 'both sides'
        if 'panel or shell' in main_dict.keys():
            self._panel_or_shell = main_dict['panel or shell'][0]
# Property decorators are used in buckling of shells. IN mm!
@property # in mm
def hw(self):
return self._web_height * 1000
@hw.setter # in mm
def hw(self, val):
self._web_height = val / 1000
@property # in mm
def tw(self):
return self._web_th * 1000
@tw.setter # in mm
def tw(self, val):
self._web_th = val / 1000
@property # in mm
def b(self):
return self._flange_width * 1000
@b.setter # in mm
def b(self, val):
self._flange_width = val / 1000
@property # in mm
def tf(self):
return self._flange_th * 1000
@tf.setter # in mm
def tf(self, val):
self._flange_th = val / 1000
@property # in mm
def s(self):
return self._spacing* 1000
@s.setter # in mm
def s(self, val):
self._spacing = val / 1000
@property # in mm
def t(self):
return self._plate_th* 1000
@t.setter # in mm
def t(self, val):
self._plate_th = val / 1000
@property # in mm
def panel_or_shell(self):
return self._panel_or_shell
@panel_or_shell.setter # in mm
def panel_or_shell(self, val):
self._panel_or_shell = val
@property
def stiffener_type(self):
return self._stiffener_type
@stiffener_type.setter
def stiffener_type(self, val):
self._stiffener_type = val
def __str__(self):
'''
Returning all properties.
'''
return \
str(
'\n Plate field span: ' + str(round(self._span*1000)) + ' mm' +
'\n Stiffener spacing: ' + str(self._spacing*1000)+' mm'+
'\n Plate thickness: ' + str(self._plate_th*1000)+' mm'+
'\n Stiffener web height: ' + str(self._web_height*1000)+' mm'+
'\n Stiffener web thickness: ' + str(self._web_th*1000)+' mm'+
'\n Stiffener flange width: ' + str(self._flange_width*1000)+' mm'+
'\n Stiffener flange thickness: ' + str(self._flange_th*1000)+' mm'+
'\n Material yield: ' + str(self._mat_yield/1e6)+' MPa'+
'\n Structure/stiffener type: ' + str(self._structure_type)+'/'+(self._stiffener_type)+
            '\n Dynamic load variable:            ' + str(self._dynamic_variable_orientation)+
            '\n Plate fixation parameter, kpp:    ' + str(self._plate_kpp) + ' ' +
            '\n Stf. fixation parameter, kps:     ' + str(self._stf_kps) + ' ' +
'\n Global stress, sig_y1/sig_y2: ' + str(round(self._sigma_y1,3))+'/'+str(round(self._sigma_y2,3))+ ' MPa' +
'\n Global stress, sig_x1/sig_x2: ' + str(round(self._sigma_x1,3))+'/'+str(round(self._sigma_x2,3))+ ' MPa' +
'\n Global shear, tau_xy: ' + str(round(self._tauxy,3)) + ' MPa' +
'\n km1,km2,km3: ' + str(self._km1)+'/'+str(self._km2)+'/'+str(self._km3)+
'\n Pressure side (p-plate/s-stf): ' + str(self._pressure_side) + ' ')
def get_beam_string(self, short = False):
''' Returning a string. '''
if type(self._stiffener_type) != str:
print('error')
base_name = self._stiffener_type+ '_' + str(round(self._web_height*1000, 0)) + 'x' + \
str(round(self._web_th*1000, 0))
if self._stiffener_type == 'FB':
ret_str = base_name
elif self._stiffener_type in ['L-bulb', 'bulb', 'hp']:
if not short:
ret_str = 'Bulb'+str(int(self._web_height*1000 + self._flange_th*1000))+'x'+\
str(round(self._web_th*1000, 0))+ '_(' +str(round(self._web_height*1000, 0)) + 'x' + \
str(round(self._web_th*1000, 0))+'_'+ str(round(self._flange_width*1000, 0)) + 'x' + \
str(round(self._flange_th*1000, 0))+')'
else:
ret_str = 'Bulb'+str(int(self._web_height*1000 + self._flange_th*1000))+'x'+\
str(round(self._web_th*1000, 0))
else:
ret_str = base_name + '__' + str(round(self._flange_width*1000, 0)) + 'x' + \
str(round(self._flange_th*1000, 0))
ret_str = ret_str.replace('.', '_')
return ret_str
# base_name = self._stiffener_type+ '_' + str(round(self._web_height*1000, 0)) + 'x' + \
# str(round(self._web_th*1000, 0))
# if self._stiffener_type == 'FB':
# ret_str = base_name
# else:
# ret_str = base_name + '__' + str(round(self._flange_width*1000, 0)) + 'x' + \
# str(round(self._flange_th*1000, 0))
#
# ret_str = ret_str.replace('.', '_')
#
# return ret_str
def get_structure_types(self):
return self._structure_types
def get_z_opt(self):
return self._zstar_optimization
def get_puls_method(self):
return self._puls_method
def get_puls_boundary(self):
return self._puls_boundary
def get_puls_stf_end(self):
return self._puls_stf_end
def get_puls_sp_or_up(self):
return self._puls_sp_or_up
def get_puls_up_boundary(self):
return self._puls_up_boundary
def get_one_line_string(self):
''' Returning a one line string. '''
return 'pl_'+str(round(self._spacing*1000, 1))+'x'+str(round(self._plate_th*1000,1))+' stf_'+self._stiffener_type+\
str(round(self._web_height*1000,1))+'x'+str(round(self._web_th*1000,1))+'+'\
+str(round(self._flange_width*1000,1))+'x'+\
str(round(self._flange_th*1000,1))
def get_report_stresses(self):
'Return the stresses to the report'
return 'sigma_y1: '+str(round(self._sigma_y1,1))+' sigma_y2: '+str(round(self._sigma_y2,1))+ \
' sigma_x1: ' + str(round(self._sigma_x1,1)) +' sigma_x2: ' + str(round(self._sigma_x2,1))+\
' tauxy: '+ str(round(self._tauxy,1))
def get_extended_string(self):
''' Some more information returned. '''
return 'span: '+str(round(self._span,4))+' structure type: '+ self._structure_type + ' stf. type: ' + \
self._stiffener_type + ' pressure side: ' + self._pressure_side
def get_sigma_y1(self):
'''
Return sigma_y1
:return:
'''
return self._sigma_y1
def get_sigma_y2(self):
'''
Return sigma_y2
:return:
'''
return self._sigma_y2
def get_sigma_x1(self):
'''
Return sigma_x
:return:
'''
return self._sigma_x1
def get_sigma_x2(self):
'''
Return sigma_x
:return:
'''
return self._sigma_x2
def get_tau_xy(self):
'''
Return tau_xy
:return:
'''
return self._tauxy
def get_s(self):
'''
Return the spacing
:return:
'''
return self._spacing
def get_pl_thk(self):
'''
Return the plate thickness
:return:
'''
return self._plate_th
def get_web_h(self):
'''
        Return the web height
:return:
'''
return self._web_height
def get_web_thk(self):
'''
        Return the web thickness
:return:
'''
return self._web_th
def get_fl_w(self):
'''
Return the flange width
:return:
'''
return self._flange_width
def get_fl_thk(self):
'''
Return the flange thickness
:return:
'''
return self._flange_th
def get_fy(self):
'''
Return material yield
:return:
'''
return self._mat_yield
def get_mat_factor(self):
return self._mat_factor
def get_span(self):
'''
Return the span
:return:
'''
return self._span
def get_lg(self):
'''
Return the girder length
:return:
'''
return self._girder_lg
def get_kpp(self):
'''
        Return the plate fixation parameter, kpp.
:return:
'''
return self._plate_kpp
def get_kps(self):
'''
        Return the stiffener fixation parameter, kps.
:return:
'''
return self._stf_kps
def get_km1(self):
'''
Return var
:return:
'''
return self._km1
def get_km2(self):
'''
Return var
:return:
'''
return self._km2
def get_km3(self):
'''
Return var
:return:
'''
return self._km3
def get_side(self):
'''
Return the checked pressure side.
:return:
'''
return self._pressure_side
def get_tuple(self):
''' Return a tuple of the plate stiffener'''
return (self._spacing, self._plate_th, self._web_height, self._web_th, self._flange_width,
self._flange_th, self._span, self._girder_lg, self._stiffener_type)
def get_section_modulus(self, efficient_se = None, dnv_table = False):
'''
Returns the section modulus.
:param efficient_se:
:return:
'''
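        # Minimal usage sketch (hypothetical values, assuming a main_dict with the keys read in __init__):
        #   struct = Structure(main_dict)
        #   Wey1, Wey2 = struct.get_section_modulus()
        # Wey1 refers to the flange side (lever arm h - ez) and Wey2 to the plate side (lever arm ez).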
#Plate. When using DNV table, default values are used for the plate
b1 = self._spacing if efficient_se==None else efficient_se
tf1 = self._plate_th
#Stiffener
tf2 = self._flange_th
b2 = self._flange_width
h = self._flange_th+self._web_height+self._plate_th
tw = self._web_th
hw = self._web_height
# cross section area
Ax = tf1 * b1 + tf2 * b2 + hw * tw
assert Ax != 0, 'Ax cannot be 0'
# distance to center of gravity in z-direction
ez = (tf1 * b1 * tf1 / 2 + hw * tw * (tf1 + hw / 2) + tf2 * b2 * (tf1 + hw + tf2 / 2)) / Ax
#ez = (tf1 * b1 * (h - tf1 / 2) + hw * tw * (tf2 + hw / 2) + tf2 * b2 * (tf2 / 2)) / Ax
# moment of inertia in y-direction (c is centroid)
Iyc = (1 / 12) * (b1 * math.pow(tf1, 3) + b2 * math.pow(tf2, 3) + tw * math.pow(hw, 3))
Iy = Iyc + (tf1 * b1 * math.pow(tf1 / 2, 2) + tw * hw * math.pow(tf1+hw / 2, 2) +
tf2 * b2 * math.pow(tf1+hw+tf2 / 2, 2)) - Ax * math.pow(ez, 2)
# elastic section moduluses y-axis
Wey1 = Iy / (h - ez)
Wey2 = Iy / ez
return Wey1, Wey2
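# Wey1 is the elastic section modulus towards the stiffener flange (lever arm h - ez) and
# Wey2 towards the plate side (lever arm ez). The governing value for a stress check is
# min(Wey1, Wey2), which is what is_acceptable_sec_mod() compares against
# get_dnv_min_section_modulus().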
def get_plasic_section_modulus(self):
'''
Returns the plastic section modulus
:return:
'''
tf1 = self._plate_th
tf2 = self._flange_th
b1 = self._spacing
b2 = self._flange_width
h = self._flange_th+self._web_height+self._plate_th
tw = self._web_th
hw = self._web_height
Ax = tf1 * b1 + tf2 * b2 + (h-tf1-tf2) * tw
ezpl = (Ax/2-b1*tf1)/tw+tf1
az1 = h-ezpl-tf1
az2 = ezpl-tf2
Wy1 = b1*tf1*(az1+tf1/2) + (tw/2)*math.pow(az1,2)
Wy2 = b2*tf2*(az2+tf2/2)+(tw/2)*math.pow(az2,2)
return Wy1+Wy2
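# ezpl locates the plastic neutral axis in the web such that the areas above and below it
# are both Ax/2; Wy1 and Wy2 are the first moments of area of the two halves about that
# axis, and their sum is returned as the plastic section modulus.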
def get_shear_center(self):
'''
Returning the shear center
:return:
'''
tf1 = self._plate_th
tf2 = self._flange_th
b1 = self._spacing
b2 = self._flange_width
h = self._flange_th+self._web_height+self._plate_th
tw = self._web_th
hw = self._web_height
Ax = tf1 * b1 + tf2 * b2 + (h-tf1-tf2) * tw
# distance to center of gravity in z-direction
ez = (b2*tf2*tf2/2 + tw*hw*(tf2+hw/2)+tf1*b1*(tf2+hw+tf1/2)) / Ax
# Shear center:
# moment of inertia, z-axis
Iz1 = tf1 * math.pow(b1, 3)
Iz2 = tf2 * math.pow(b2, 3)
ht = h - tf1 / 2 - tf2 / 2
return (Iz1 * ht) / (Iz1 + Iz2) + tf2 / 2 - ez
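# The returned value appears to be the offset of the shear centre from the cross-section
# centroid along the profile height, obtained by weighting the plate (Iz1) and flange (Iz2)
# contributions over the distance ht between their mid-planes.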
def get_moment_of_intertia_hp(self):
    '''
    Experimental moment of inertia calculation for HP (bulb) profiles,
    using the Stiffener helper class defined below (dimensions in mm).
    '''
class Stiffener:
def __init__(self, hp_height, hp_thickness, plate_width, plate_thickness):
self.hp_height = hp_height
self.hp_thickness = hp_thickness
self.plate_width = plate_width
self.plate_thickness = plate_thickness
def _integrand(self, x, c, r):
if (x <= -c).any() or (x >= c).any():
return (x ** 2 + r ** 2) ** 0.5
else:
return r
def _distance(self, x, c, r):
if (x <= -c).any() or (x >= c).any():
return self.hp_height / 2 - self.hp_thickness - r
elif x >= c:
return self.hp_height / 2 - self.hp_thickness - r
else:
return self.hp_height / 2 - self.hp_thickness - (x ** 2 - c ** 2) ** 0.5
def get_moment_of_inertia_hp(self):
    import numpy as np  # local import, in case numpy is not available at module level
    try:
        from scipy.integrate import simps
    except ImportError:  # newer scipy versions only provide simpson
        from scipy.integrate import simpson as simps
    # Create points for integration
    c = (self.hp_height - 2 * self.hp_thickness) / 2
    r = self.hp_thickness
    x = np.linspace(-c, c, num=1000)
# Integrate to find moment of inertia
integrand = lambda x: self._integrand(x, c, r)
y = self._distance(x, c, r)
I = simps(y * integrand(x), x)
return I
def get_moment_of_inertia(self):
I_stiffener = self.get_moment_of_inertia_hp()
I_plate = self.plate_thickness * (self.plate_width ** 3 / 12)
I_total = I_stiffener + I_plate
return I_stiffener, I_plate, I_total
# Example usage (commented out so it does not execute together with the class definition):
# stiffener = Stiffener(hp_height=300, hp_thickness=12, plate_width=680, plate_thickness=25)
# I_stiffener, I_plate, I_total = stiffener.get_moment_of_inertia()
# print(f"Moment of inertia of stiffener: {I_stiffener:.3e} mm^4")
# print(f"Moment of inertia of plate: {I_plate:.3e} mm^4")
# print(f"Moment of inertia of combined: {I_total:.3e} mm^4")
def get_moment_of_intertia(self, efficent_se=None, only_stf = False, tf1 = None, reduced_tw = None,
plate_thk = None, plate_spacing = None):
'''
Return the moment of inertia (y-axis, stiffener with attached effective plate).
:return:
'''
if only_stf:
tf1 = t = 0
b1 = s_e = 0
else:
tf1 = t = self._plate_th if tf1 == None else tf1
b1 = s_e =self._spacing if efficent_se==None else efficent_se
e_f = 0
h = self._flange_th+self._web_height+tf1
tw = self._web_th if reduced_tw == None else reduced_tw/1000
hw = self._web_height
tf2 = tf = self._flange_th
b2 = bf = self._flange_width
Ax = tf1 * b1 + tf2 * b2 + hw * tw
Iyc = (1 / 12) * (b1 * math.pow(tf1, 3) + b2 * math.pow(tf2, 3) + tw * math.pow(hw, 3))
ez = (tf1 * b1 * (h - tf1 / 2) + hw * tw * (tf2 + hw / 2) + tf2 * b2 * (tf2 / 2)) / Ax
Iy = Iyc + (tf1 * b1 * math.pow(tf2 + hw + tf1 / 2, 2) + tw * hw * math.pow(tf2 + hw / 2, 2) +
tf2 * b2 * math.pow(tf2 / 2, 2)) - Ax * math.pow(ez, 2)
# ###
# z_c = bf * tf * e_f / (s_e * t + hw * tw + bf * tf)
# I_z = 1.0 / 12.0 * t * math.pow(s_e,3) + 1.0 / 12.0 * hw * math.pow(tw,3) + 1.0 / 12.0 * tf * math.pow(bf,3) +\
# t * s_e * math.pow(z_c,2) + \
# tw * hw * math.pow(z_c,2) + bf * tf * math.pow(e_f - z_c,2)
# ###
#
# z_c = (bf * tf * (tf / 2.0 + t / 2.0 + hw) + hw * tw * (hw / 2.0 + t / 2.0)) / (s_e * t + hw * tw + bf * tf)
# I_sef = 1.0 / 12.0 * tw * hw ** 3 + 1.0 / 12.0 * bf * tf ** 3 + 1.0 / 12.0 * s_e * t ** 3 + tw * hw * (
# hw / 2.0 + t / 2.0 - z_c) ** 2 + tf * bf * (hw + t / 2.0 + tf / 2.0 - z_c) ** 2 + s_e * t * z_c ** 2
# print(I_sef, I_z, Iy)
#print(2*(bf*(h/2)**3/12 + tf*(h-tf)**3/12) + tw*h**3/12, Iy)
return Iy
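# The result is in m^4 (all member dimensions are stored in metres); callers that need mm^4,
# e.g. stiffened_panel(), multiply by 1000**4. With only_stf=True the plate flange is dropped,
# and reduced_tw (given in mm) substitutes a shear-reduced web thickness.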
def get_Iz_moment_of_inertia(self, reduced_tw = None):
tw = self._web_th*1000 if reduced_tw is None else reduced_tw
hw = self._web_height * 1000
tf2 = self._flange_th * 1000
b2 = self._flange_width * 1000
if self._stiffener_type == 'FB':
Iz = math.pow(tw,3)*hw/12
elif self._stiffener_type == 'T':
Iz = hw*math.pow(tw,3)/12 + tf2*math.pow(b2,3)/12
else:
Czver = tw/2
Czhor = b2/2
Aver = hw*tw
Ahor = b2*tf2
Atot = Aver+Ahor
Czoverall = Aver*Czver/Atot + Ahor*Czhor/Atot
dz = Czver - Czoverall
Iver = (1/12)*hw*math.pow(tw,3) + Aver*math.pow(dz,2)
dz = Czhor-Czoverall
Ihor = (1/12)*tf2*math.pow(b2,3) + Ahor*math.pow(dz,2)
Iz = Iver + Ihor
return Iz
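# Iz is evaluated in mm^4 about the vertical axis of the profile (inputs are converted to mm
# above); for L/bulb sections the parallel-axis terms account for the offset between the web
# and flange centroids. It feeds the torsional buckling strength fET in stiffened_panel().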
def get_moment_of_interia_iacs(self, efficent_se=None, only_stf = False, tf1 = None):
if only_stf:
tf1 = 0
b1 = 0
else:
tf1 = self._plate_th if tf1 == None else tf1
b1 = self._spacing if efficent_se==None else efficent_se
h = self._flange_th+self._web_height+tf1
tw = self._web_th
hw = self._web_height
tf2 = self._flange_th
b2 = self._flange_width
Af = b2*tf2
Aw = hw*tw
ef = hw + tf2/2
Iy = (Af*math.pow(ef,2)*math.pow(b2,2)/12) * ( (Af+2.6*Aw) / (Af+Aw))
return Iy
def get_torsional_moment_venant(self, reduced_tw = None, efficient_flange = True):
# if efficient_flange:
# ef = self.get_ef_iacs()*1000
# else:
# ef = self._flange_width * 1000
tf = self._flange_th*1000
tw = self._web_th*1000 if reduced_tw is None else reduced_tw
bf = self._flange_width*1000
hw = self._web_height*1000
# if self._stiffener_type == 'FB':
# It = ((hw*math.pow(tw,3)/3e4) * (1-0.63*(tw/hw)) )
# else:
# It = ((((ef-0.5*tf)*math.pow(tw,3))/3e4) * (1-0.63*(tw/(ef-0.5*tf))) + ((bf*math.pow(tf,3))/3e4)
# * (1-0.63*(tf/bf)) )
# G = 80769.2
# It2 = (2/3) * (math.pow(tw,3)*hw + bf*math.pow(tf, 3)) *(hw+tf/2)
# print(It, It2*G)
# print(hw, tw, bf, tf)
I_t1 = 1.0 / 3.0 * math.pow(tw , 3) * hw + 1.0 / 3.0 * math.pow(tf, 3) * bf
# I_t2 = 1.0 / 3.0 * math.pow(tw , 3) * (hw + tf) + 1.0 / 3.0 * math.pow(tf, 3) * (bf - tw)
# print('It', I_t1, I_t2, It* 1e4)
return I_t1# * 1e4
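# I_t1 is the St. Venant torsional constant of the open thin-walled section,
# the sum of thickness**3 * length / 3 for web and flange, in mm^4. The commented lines above
# keep an earlier variant with the 1 - 0.63*(t/h) end corrections for reference.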
def get_polar_moment(self, reduced_tw = None):
tf = self._flange_th*1000
tw = self._web_th*1000 if reduced_tw is None else reduced_tw
ef = self.get_flange_eccentricity()*1000
hw = self._web_height*1000
b = self._flange_width*1000
#Ipo = (A|w*(ef-0.5*tf)**2/3+Af*ef**2)*10e-4 #polar moment of interia in cm^4
#Ipo = (tw/3)*math.pow(hw, 3) + tf*(math.pow(hw+tf/2,2)*b)+(tf/3)*(math.pow(ef+b/2,3)-math.pow(ef-b/2,3))
# C24/3*C70^3+C26*((C70+C26/2)^2*C25)+C26/3*((C72+C25/2)^3-(C72-C25/2)^3) + (C25*C26^3)/12 + (C70*C24^3)/12
Ipo = tw/3*math.pow(hw, 3)+tf*(math.pow(hw+tf/2,2)*b)+tf/3*(math.pow(ef+b/2,3)-math.pow(ef-b/2,3)) + \
(b*math.pow(tf,3))/12 + (hw*math.pow(tw,3))/12
return Ipo
def get_flange_eccentricity(self):
ecc = 0 if self._stiffener_type in ['FB', 'T'] else self._flange_width / 2 - self._web_th / 2
return ecc
def get_ef_iacs(self):
if self._stiffener_type == 'FB':
ef = self._web_height
# elif self._stiffener_type == 'L-bulb':
# ef = self._web_height-0.5*self._flange_th
elif self._stiffener_type in ['L', 'T', 'L-bulb', 'HP-profile', 'HP', 'HP-bulb']:
ef = self._web_height + 0.5*self._flange_th
return ef
def get_stf_cog_eccentricity(self):
e = (self._web_height * self._web_th * (self._web_height / 2) + self._flange_width * self._flange_th *
(self._web_height + self._web_th / 2)) / (self._web_height * self._web_th + self._flange_width * self._flange_th)
return e
def get_structure_prop(self):
return self._main_dict
def get_structure_type(self):
return self._structure_type
def get_stiffener_type(self):
return self._stiffener_type
def get_shear_area(self):
'''
Returning the shear area in [m^2]
:return:
'''
return ((self._flange_th*self._web_th) + (self._web_th*self._plate_th) + (self._web_height*self._web_th))
def set_main_properties(self, main_dict):
'''
Resetting all properties.
:param main_dict:
:return:
'''
self._main_dict = main_dict
self._plate_th = main_dict['plate_thk'][0]
self._web_height = main_dict['stf_web_height'][0]
self._web_th = main_dict['stf_web_thk'][0]
self._flange_width = main_dict['stf_flange_width'][0]
self._flange_th = main_dict['stf_flange_thk'][0]
self._mat_yield = main_dict['mat_yield'][0]
self._mat_factor = main_dict['mat_factor'][0]
self._span = main_dict['span'][0]
self._spacing = main_dict['spacing'][0]
self._structure_type = main_dict['structure_type'][0]
self._sigma_y1=main_dict['sigma_y1'][0]
self._sigma_y2=main_dict['sigma_y2'][0]
self._sigma_x1 = main_dict['sigma_x1'][0]
self._sigma_x2 = main_dict['sigma_x2'][0]
self._tauxy=main_dict['tau_xy'][0]
self._plate_kpp = main_dict['plate_kpp'][0]
self._stf_kps = main_dict['stf_kps'][0]
self._km1 = main_dict['stf_km1'][0]
self._km2 = main_dict['stf_km2'][0]
self._km3 = main_dict['stf_km3'][0]
self._stiffener_type=main_dict['stf_type'][0]
try:
self._girder_lg=main_dict['girder_lg'][0]
except KeyError:
self._girder_lg = 10
try:
self._pressure_side = main_dict['press_side'][0]
except KeyError:
self._pressure_side = 'p'
self._zstar_optimization = main_dict['zstar_optimization'][0]
self._puls_method = main_dict['puls buckling method'][0]
self._puls_boundary = main_dict['puls boundary'][0]
self._puls_stf_end = main_dict['puls stiffener end'][0]
self._puls_sp_or_up = main_dict['puls sp or up'][0]
self._puls_up_boundary = main_dict['puls up boundary'][0]
self._panel_or_shell = main_dict['panel or shell'][0]
def set_stresses(self,sigy1,sigy2,sigx1,sigx2,tauxy):
'''
Setting the global stresses.
:param sigy1:
:param sigy2:
:param sigx1:
:param sigx2:
:param tauxy:
:return:
'''
self._main_dict['sigma_y1'][0]= sigy1
self._sigma_y1 = sigy1
self._main_dict['sigma_y2'][0]= sigy2
self._sigma_y2 = sigy2
self._main_dict['sigma_x1'][0]= sigx1
self._sigma_x1 = sigx1
self._main_dict['sigma_x2'][0]= sigx2
self._sigma_x2 = sigx2
self._main_dict['tau_xy'][0]= tauxy
self._tauxy = tauxy
def get_cross_section_area(self, efficient_se = None, include_plate = True):
'''
Returns the cross section area.
:return:
'''
tf1 = self._plate_th if include_plate else 0
tf2 = self._flange_th
if include_plate:
b1 = self._spacing if efficient_se==None else efficient_se
else:
b1 = 0
b2 = self._flange_width
#h = self._flange_th+self._web_height+self._plate_th
h = self._web_height
tw = self._web_th
#print('Plate: thk', tf1, 's', b1, 'Flange: thk', tf2, 'width', b2, 'Web: thk', tw, 'h', h)
return tf1 * b1 + tf2 * b2 + h * tw
def get_cross_section_centroid_with_effective_plate(self, se = None, tf1 = None, include_plate = True,
reduced_tw = None):
'''
Returns cross section centroid
:return:
'''
# checked with example
if include_plate:
tf1 = self._plate_th if tf1 == None else tf1
b1 = self._spacing if se == None else se
else:
tf1 = 0
b1 = 0
tf2 = self._flange_th
b2 = self._flange_width
tw = self._web_th if reduced_tw == None else reduced_tw/1000
hw = self._web_height
Ax = tf1 * b1 + tf2 * b2 + hw * tw
effana = (tf1 * b1 * tf1/2 + hw * tw * (tf1 + hw / 2) + tf2 * b2 * (tf1+hw+tf2/2)) / Ax
return effana
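# The centroid is measured from the outer plate surface, in metres; se (effective plate
# width, m) and reduced_tw (mm) follow the same conventions as get_moment_of_intertia().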
def get_weight(self):
'''
Return the weight.
:return:
'''
return 7850*self._span*(self._spacing*self._plate_th+self._web_height*self._web_th+self._flange_width*self._flange_th)
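# Weight in kg of one plate field with its stiffener over one span, assuming a steel
# density of 7850 kg/m^3.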
def get_weight_width_lg(self):
'''
Return the weight including Lg
:return:
'''
pl_area = self._girder_lg*self._plate_th
stf_area = (self._web_height*self._web_th+self._flange_width*self._flange_th)*(self._girder_lg//self._spacing)
return (pl_area+stf_area)*7850*self._span
def set_span(self,span):
'''
Setting the span. Used when moving a point.
:return:
'''
self._span = span
self._main_dict['span'][0] = span
def get_puls_input(self, run_type: str = 'SP'):
if self._stiffener_type == 'FB':
stf_type = 'F'
else:
stf_type = self._stiffener_type
map_boundary = {'Continuous': 'C', 'Sniped': 'S'}
sig_x1 = self._sigma_x1
sig_x2 = self._sigma_x2
if sig_x1 * sig_x2 >= 0:
sigxd = sig_x1 if abs(sig_x1) > abs(sig_x2) else sig_x2
else:
sigxd = max(sig_x1, sig_x2)
if self._puls_sp_or_up == 'SP':
return_dict = {'Identification': None, 'Length of panel': self._span*1000, 'Stiffener spacing': self._spacing*1000,
'Plate thickness': self._plate_th*1000,
'Number of primary stiffeners': 10,
'Stiffener type (L,T,F)': stf_type,
'Stiffener boundary': map_boundary[self._puls_stf_end]
if map_boundary[self._puls_stf_end] in ['C', 'S']
else 'C' if self._puls_stf_end == 'Continuous' else 'S',
'Stiff. Height': self._web_height*1000, 'Web thick.': self._web_th*1000,
'Flange width': self._flange_width*1000,
'Flange thick.': self._flange_th*1000, 'Tilt angle': 0,
'Number of sec. stiffeners': 0, 'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.3,
'Yield stress plate': self._mat_yield/1e6, 'Yield stress stiffener': self._mat_yield/1e6,
'Axial stress': 0 if self._puls_boundary == 'GT' else sigxd,
'Trans. stress 1': 0 if self._puls_boundary == 'GL' else self._sigma_y1,
'Trans. stress 2': 0 if self._puls_boundary == 'GL' else self._sigma_y2,
'Shear stress': self._tauxy,
'Pressure (fixed)': None, 'In-plane support': self._puls_boundary,
'sp or up': self._puls_sp_or_up}
else:
boundary = self._puls_up_boundary
blist = list()
if len(boundary) != 4:
blist = ['SS', 'SS', 'SS', 'SS']
else:
for letter in boundary:
if letter.upper() == 'S':
blist.append('SS')
elif letter.upper() == 'C':
blist.append('CL')
else:
blist.append('SS')
return_dict = {'Identification': None, 'Length of plate': self._span*1000, 'Width of c': self._spacing*1000,
'Plate thickness': self._plate_th*1000,
'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.3,
'Yield stress plate': self._mat_yield/1e6,
'Axial stress 1': 0 if self._puls_boundary == 'GT' else sigxd,
'Axial stress 2': 0 if self._puls_boundary == 'GT' else sigxd,
'Trans. stress 1': 0 if self._puls_boundary == 'GL' else self._sigma_y1,
'Trans. stress 2': 0 if self._puls_boundary == 'GL' else self._sigma_y2,
'Shear stress': self._tauxy, 'Pressure (fixed)': None, 'In-plane support': self._puls_boundary,
'Rot left': blist[0], 'Rot right': blist[1], 'Rot upper': blist[2], 'Rot lower': blist[3],
'sp or up': self._puls_sp_or_up}
return return_dict
def get_buckling_ml_input(self, design_lat_press: float = 0, sp_or_up: str = 'SP', alone = True, csr = False):
'''
Class labels used in the ML data:
{'negative utilisation': 1, 'non-zero': 2, 'Division by zero': 3, 'Overflow': 4, 'aspect ratio': 5,
'global slenderness': 6, 'pressure': 7, 'web-flange-ratio': 8, 'below 0.87': 9,
'between 0.87 and 1': 10, 'above 1': 11}
'''
stf_type = {'T-bar': 1,'T': 1, 'L-bulb': 2, 'Angle': 3, 'Flatbar': 4, 'FB': 4, 'L': 3}
stf_end = {'Cont': 1, 'C':1 , 'Sniped': 2, 'S': 2}
field_type = {'Integrated': 1,'Int': 1, 'Girder - long': 2,'GL': 2, 'Girder - trans': 3, 'GT': 3}
up_boundary = {'SS': 1, 'CL': 2}
map_boundary = {'Continuous': 'C', 'Sniped': 'S'}
sig_x1 = self._sigma_x1
sig_x2 = self._sigma_x2
if sig_x1 * sig_x2 >= 0:
sigxd = sig_x1 if abs(sig_x1) > abs(sig_x2) else sig_x2
else:
sigxd = max(sig_x1, sig_x2)
if self._puls_sp_or_up == 'SP':
if csr == False:
this_field = [self._span * 1000, self._spacing * 1000, self._plate_th * 1000, self._web_height * 1000,
self._web_th * 1000, self._flange_width * 1000, self._flange_th * 1000, self._mat_yield / 1e6,
self._mat_yield / 1e6, sigxd, self._sigma_y1, self._sigma_y2, self._tauxy,
design_lat_press/1000, stf_type[self._stiffener_type],
stf_end[map_boundary[self._puls_stf_end]]]
else:
this_field = [self._span * 1000, self._spacing * 1000, self._plate_th * 1000, self._web_height * 1000,
self._web_th * 1000, self._flange_width * 1000, self._flange_th * 1000, self._mat_yield / 1e6,
self._mat_yield / 1e6, sigxd, self._sigma_y1, self._sigma_y2, self._tauxy,
design_lat_press/1000, stf_type[self._stiffener_type],
stf_end[map_boundary[self._puls_stf_end]],
field_type[self._puls_boundary]]
else:
ss_cl_list = list()
for letter_i in self._puls_up_boundary:
if letter_i == 'S':
ss_cl_list.append(up_boundary['SS'])
else:
ss_cl_list.append(up_boundary['CL'])
b1, b2, b3, b4 = ss_cl_list
if csr == False:
this_field = [self._span * 1000, self._spacing * 1000, self._plate_th * 1000, self._mat_yield / 1e6,
sigxd, self._sigma_y1, self._sigma_y2, self._tauxy, design_lat_press/1000,
b1, b2, b3, b4]
else:
this_field = [self._span * 1000, self._spacing * 1000, self._plate_th * 1000, self._mat_yield / 1e6,
sigxd, self._sigma_y1, self._sigma_y2, self._tauxy, design_lat_press/1000,
field_type[self._puls_boundary], b1, b2, b3, b4]
if alone:
return [this_field,]
else:
return this_field
class CalcScantlings(Structure):
'''
This class does the calculations for the plate fields.
Input is the same main dictionary as for the Structure class.
The class inherits from the Structure class.
'''
def __init__(self, main_dict, lat_press = True, category = 'secondary'):
super(CalcScantlings,self).__init__(main_dict=main_dict)
self.lat_press = lat_press
self.category = category
self._need_recalc = True
@property
def need_recalc(self):
return self._need_recalc
@need_recalc.setter
def need_recalc(self, val):
self._need_recalc = val
def get_results_for_report(self,lat_press=0):
'''
Returns a string for the report.
:return:
'''
buc = [round(res,1) for res in self.calculate_buckling_all(design_lat_press=lat_press)]
return 'Minimum section modulus:'\
+str(int(self.get_dnv_min_section_modulus(design_pressure_kpa=lat_press)*1000**3))\
+'mm^3 '+' Minimum plate thickness: '\
+str(round(self.get_dnv_min_thickness(design_pressure_kpa=lat_press),1))+\
' Buckling results: eq7_19: '+str(buc[0])+' eq7_50: '+str(buc[1])+ ' eq7_51: '\
+str(buc[2])+ ' eq7_52: '+str(buc[3])+ ' eq7_53: '+str(buc[4])
def calculate_slamming_plate(self, slamming_pressure, red_fac = 1):
''' Slamming pressure input is Pa '''
ka1 = 1.1
ka2 = min(max(0.4, self._spacing / self._span), 1)
ka = math.pow(ka1 - 0.25*ka2,2)
sigmaf = self._mat_yield/1e6 # MPa
psl = red_fac * slamming_pressure/1000 # kPa
Cd = 1.5
return 0.0158*ka*self._spacing*1000*math.sqrt(psl/(Cd*sigmaf))
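# Returns the required plate thickness in mm for the given slamming pressure (input in Pa,
# converted to kPa above); ka is the panel aspect-ratio correction and Cd = 1.5 is used as
# a fixed coefficient here.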
def calculate_slamming_stiffener(self, slamming_pressure, angle = 90, red_fac = 1):
tk = 0
psl = slamming_pressure / 1000 # kPa
Pst = psl * red_fac # Currently DNV does not use psl/2 for slamming.
sigmaf = self._mat_yield / 1e6 # MPa
hw, twa, tp, tf, bf, s = [(val - tk) * 1000 for val in [self._web_height, self._web_th, self._plate_th,
self._flange_th, self._flange_width, self._spacing]]
ns = 2
tau_eH = sigmaf/math.sqrt(3)
h_stf = (self._web_height+self._flange_th)*1000
f_shr = 0.7
lbdg = self._span
lshr = self._span - self._spacing/4000
dshr = h_stf + tp if 75 <= angle <= 90 else (h_stf + tp)*math.sin(math.radians(angle))
tw = (f_shr*Pst*s*lshr)/(dshr*tau_eH)
if self._web_th*1000 < tw:
return {'tw_req': tw, 'Zp_req':None}
fpl = 8* (1+(ns/2))
Zp_req = (1.2*Pst*s*math.pow(lbdg,2)/(fpl*sigmaf)) + \
(ns*(1-math.sqrt(1-math.pow(tw/twa,2)))*hw*tw*(hw+tp))/8000
return {'tw_req': tw, 'Zp_req':Zp_req}
def check_all_slamming(self, slamming_pressure, stf_red_fact = 1, pl_red_fact = 1):
''' A summary check of slamming '''
pl_chk = self.calculate_slamming_plate(slamming_pressure, red_fac= pl_red_fact)
if self._plate_th*1000 < pl_chk:
chk1 = pl_chk / (self._plate_th * 1000)
return False, chk1
stf_res = self.calculate_slamming_stiffener(slamming_pressure, red_fac = stf_red_fact)
#print('Slamming checked')
if self._web_th*1000 < stf_res['tw_req']:
chk2 = stf_res['tw_req'] / (self._web_th * 1000)
return False, chk2
if stf_res['Zp_req'] is not None:
eff_pl_sec_mod = self.get_net_effective_plastic_section_modulus()
if eff_pl_sec_mod < stf_res['Zp_req']:
chk3 = stf_res['Zp_req']/eff_pl_sec_mod
return False, chk3
return True, None
def get_net_effective_plastic_section_modulus(self, angle = 90):
''' Calculated according to Rules for classification: Ships — DNVGL-RU-SHIP Pt.3 Ch.3. Edition July 2017,
page 83 '''
tk = 0
angle_rad = math.radians(angle)
hw, tw, tp, tf, bf = [(val - tk) * 1000 for val in [self._web_height, self._web_th, self._plate_th, self._flange_th,
self._flange_width]]
h_stf = (self._web_height+self._flange_th)*1000
de_gr = 0
tw_gr = self._web_th*1000
hf_ctr = h_stf-0.5*tf if self.get_stiffener_type() not in ['L','L-bulb'] else h_stf - de_gr - 0.5*tf
bf_ctr = 0 if self.get_stiffener_type() == 'T' else 0.5*(tf - tw_gr)
beta = 0.5
gamma = (1 + math.sqrt(3+12*beta))/4
Af = 0 if self.get_stiffener_type() == 'FB' else bf*tf
if 75 <= angle <= 90:
zpl = (hw*tw*(hw+tp)/2000) + ( (2*gamma-1) * Af * ((hf_ctr + tp/2)) / 1000)
elif angle < 75:
zpl = (hw*tw*(hw+tp)/2000)+\
( (2*gamma-1) * Af * ((hf_ctr + tp/2) * math.sin(angle_rad) - bf_ctr*math.cos(angle_rad)) / 1000)
return zpl
def get_dnv_min_section_modulus(self, design_pressure_kpa, printit = False):
''' Section modulus according to DNV rules '''
design_pressure = design_pressure_kpa
fy = self._mat_yield / 1e6
fyd = fy/self._mat_factor
sigma_y = self._sigma_y2 + (self._sigma_y1-self._sigma_y2)\
*(min(0.25*self._span,0.5*self._spacing)/self._span)
sig_x1 = self._sigma_x1
sig_x2 = self._sigma_x2
if sig_x1 * sig_x2 >= 0:
sigxd = sig_x1 if abs(sig_x1) > abs(sig_x2) else sig_x2
else:
sigxd =max(sig_x1 , sig_x2)
sigma_jd = math.sqrt(math.pow(sigxd,2)+math.pow(sigma_y,2)-
sigxd*sigma_y+3*math.pow(self._tauxy,2))
sigma_pd2 = fyd-sigma_jd # design_bending_stress_mpa
kps = self._stf_kps # 1 is clamped, 0.9 is simply supported.
km_sides = min(self._km1,self._km3) # see table 3 in DNVGL-OS-C101 (page 62)
km_middle = self._km2 # see table 3 in DNVGL-OS-C101 (page 62)
Zs = ((math.pow(self._span, 2) * self._spacing * design_pressure) /
(min(km_middle, km_sides) * (sigma_pd2) * kps)) * math.pow(10, 6)
if printit:
print('Sigma y1', self._sigma_y1, 'Sigma y2', self._sigma_y2, 'Sigma x', self._sigma_x1, 'Pressure', design_pressure)
return max(math.pow(15, 3) / math.pow(1000, 3), Zs / math.pow(1000, 3))
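# Zs is converted from mm^3 to m^3 on return, with a lower bound of 15**3 mm^3 (3375 mm^3);
# get_results_for_report() scales the value back to mm^3 for display.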
def get_dnv_min_thickness(self, design_pressure_kpa):
'''
Return minimum thickness in mm
:param design_pressure_kpa:
:return:
'''
design_pressure = design_pressure_kpa
#print(self._sigma_x1)
sigma_y = self._sigma_y2 + (self._sigma_y1-self._sigma_y2)\
*(min(0.25*self._span,0.5*self._spacing)/self._span)
sig_x1 = self._sigma_x1
sig_x2 = self._sigma_x2
if sig_x1 * sig_x2 >= 0:
sigxd = sig_x1 if abs(sig_x1) > abs(sig_x2) else sig_x2
else:
sigxd =max(sig_x1 , sig_x2)
sigma_jd = math.sqrt(math.pow(sigxd,2)+math.pow(sigma_y,2)-
sigxd*sigma_y+3*math.pow(self._tauxy,2))
fy = self._mat_yield / 1000000
fyd = fy/self._mat_factor
sigma_pd1 = min(1.3*(fyd-sigma_jd), fyd)
sigma_pd1 = abs(sigma_pd1)
#print(fyd, sigma_jd, fyd)
if self.category == 'secondary':
t0 = 5
else:
t0 = 7
t_min = (14.3 * t0) / math.sqrt(fyd)
ka = math.pow(1.1 - 0.25 * self._spacing/self._span, 2)
if ka > 1:
ka =1
elif ka<0.72:
ka = 0.72
assert sigma_pd1 > 0, 'sigma_pd1 must be positive | current value is: ' + str(sigma_pd1)
t_min_bend = (15.8 * ka * self._spacing * math.sqrt(design_pressure)) / \
math.sqrt(sigma_pd1 *self._plate_kpp)
if self.lat_press:
return max(t_min, t_min_bend)
else:
return t_min
def get_minimum_shear_area(self, pressure):
'''
Calculating the minimum shear area according to ch 6.4.4.
Return [m^2]
:return:
'''
#print('SIGMA_X ', self._sigma_x1)
l = self._span
s = self._spacing
fy = self._mat_yield
fyd = (fy/self._mat_factor)/1e6 #yield strength
sig_x1 = self._sigma_x1
sig_x2 = self._sigma_x2
if sig_x1 * sig_x2 >= 0:
sigxd = sig_x1 if abs(sig_x1) > abs(sig_x2) else sig_x2
else:
sigxd =max(sig_x1 , sig_x2)
taupds = 0.577*math.sqrt(math.pow(fyd, 2) - math.pow(sigxd, 2))
As = ((l*s*pressure)/(2*taupds)) * math.pow(10,3)
return As/math.pow(1000,2)
def is_acceptable_sec_mod(self, section_module, pressure):
'''
Checking if the result is acceptable.
:param section_module:
:param pressure:
:return:
'''
return min(section_module) >= self.get_dnv_min_section_modulus(pressure)
def is_acceptable_shear_area(self, shear_area, pressure):
'''
Return True if the shear area is sufficient.
:param shear_area:
:param pressure:
:return:
'''
return shear_area >= self.get_minimum_shear_area(pressure)
def get_plate_efficent_b(self,design_lat_press=0,axial_stress=50,
trans_stress_small=100,trans_stress_large=100):
'''
Simple buckling calculations according to DNV-RP-C201
:return:
'''
#7.2 Forces in the idealised stiffened plate
s = self._spacing #ok
t = self._plate_th #ok
l = self._span #ok
E = 2.1e11 #ok
pSd = design_lat_press*1000
sigy1Sd =trans_stress_large*1e6
sigy2Sd =trans_stress_small*1e6
sigxSd = axial_stress*1e6
fy = self._mat_yield #ok
#7.3 Effective plate width
alphap=0.525*(s/t)*math.sqrt(fy/E) # reduced plate slenderness, checked not calculated with ex
alphac = 1.1*(s/t)*math.sqrt(fy/E) # checked not calculated with example
mu6_9 = 0.21*(alphac-0.2)
if alphac<=0.2: kappa = 1 # eq6.7, all kappa checked not calculated with example
elif 0.2<alphac<2: kappa = (1/(2*math.pow(alphac,2)))*(1+mu6_9+math.pow(alphac,2)
-math.sqrt(math.pow(1+mu6_9+math.pow(alphac,2),2)
-4*math.pow(alphac,2))) # ok
else: kappa=(1/(2*math.pow(alphac,2)))+0.07 # ok
ha = 0.05*(s/t)-0.75
assert ha>= 0,'ha must be larger than 0'
kp = 1 if pSd<=2*((t/s)**2)*fy else 1-ha*((pSd/fy)-2*(t/s)**2)
sigyR=( (1.3*t/l)*math.sqrt(E/fy)+kappa*(1-(1.3*t/l)*math.sqrt(E/fy)))*fy*kp # checked not calculated with example
l1 = min(0.25*l,0.5*s)
sig_min, sig_max = min(sigy1Sd,sigy2Sd),max(sigy1Sd,sigy2Sd) # self-made
sigySd = sig_min+(sig_max-sig_min)*(1-l1/l) # see 6.8, page 15
ci = 1-s/(120*t) if (s/t)<=120 else 0 # checked not calculated with example
Cxs = (alphap-0.22)/math.pow(alphap,2) if alphap > 0.673 else 1 # reduction factor longitudinal
# eq7.16, reduction factor transverse, compression (positive) else tension
Cys = math.sqrt(1-math.pow(sigySd/sigyR,2) + ci*((sigxSd*sigySd)/(Cxs*fy*sigyR))) if sigySd >= 0 \
else min(0.5*(math.sqrt(4-3*math.pow(sigySd/fy,2))+sigySd/fy),1) #ok, checked
#7.7.3 Resistance parameters for stiffeners
return s * Cxs * Cys # 7.3, eq7.13, checked
def buckling_local_stiffener(self):
'''
Local requirements for stiffeners. Chapter 9.11.
:return:
'''
epsilon = math.sqrt(235 / (self._mat_yield / 1e6))
if self._stiffener_type in ['L', 'L-bulb']:
c = self._flange_width - self._web_th/2
elif self._stiffener_type == 'T':
c = self._flange_width/2 - self._web_th/2
elif self._stiffener_type == 'FB':
return self._web_height <= 42 * self._web_th * epsilon, self._web_height/(42 * self._web_th * epsilon)
# print(self._web_height, self._web_th, self._flange_width ,self._flange_th )
# print('c:',c, 14 * self._flange_th * epsilon, ' | ', self._web_height, 42 * self._web_th * epsilon)
# print(c <= (14 * self._flange_th * epsilon) and self._web_height <= 42 * self._web_th * epsilon)
# print(c/(14 * self._flange_th * epsilon), self._web_height / (42 * self._web_th * epsilon))
# print('')
return c <= (14 * self._flange_th * epsilon) and self._web_height <= 42 * self._web_th * epsilon, \
max(c/(14 * self._flange_th * epsilon), self._web_height / (42 * self._web_th * epsilon))
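# The method returns (is_acceptable, utilisation). Flat bars (FB) only check the web
# slenderness hw <= 42*tw*epsilon; L, L-bulb and T profiles additionally check the flange
# outstand c <= 14*tf*epsilon, with epsilon = sqrt(235/fy [MPa]).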
def is_acceptable_pl_thk(self, design_pressure):
'''
Checking if the thickness is acceptable.
:return:
'''
return self.get_dnv_min_thickness(design_pressure) <= self._plate_th*1000
class AllStructure():
'''
Coordinates the buckling calculations for plate, stiffener and girder.
'''
def __init__(self, Plate: CalcScantlings = None, Stiffener: CalcScantlings = None, Girder: CalcScantlings = None,
main_dict = None):
super(AllStructure, self).__init__()
self._Plate = Plate # This contain the stresses
self._Stiffener = Stiffener
self._Girder = Girder
self._lat_press = None
self._v = 0.3
self._E = 2.1e11
self._min_lat_press_adj_span = None if main_dict['minimum pressure in adjacent spans'][0] == 0 else \
main_dict['minimum pressure in adjacent spans'][0]
self._yield = main_dict['material yield'][0]
self._stress_load_factor = main_dict['load factor on stresses'][0]
self._lat_load_factor = main_dict['load factor on pressure'][0]
self._method = main_dict['buckling method'][0]
self._stf_end_support = main_dict['stiffener end support'][0]#'Continuous'
self._girder_end_support = main_dict['girder end support'][0]#'Continuous'
self._tension_field_action = main_dict['tension field'][0]# 'not allowed'
self._stiffed_plate_effective_aginst_sigy = main_dict['plate effective agains sigy'][0] #True
self._buckling_length_factor_stf = None if main_dict['buckling length factor stf'][0] == 0 else \
main_dict['buckling length factor stf'][0]
self._buckling_length_factor_girder = None if main_dict['buckling length factor girder'][0] == 0 else \
main_dict['buckling length factor girder'][0]
self._km3 = main_dict['km3'][0]#12
self._km2 = main_dict['km2'][0]#24
self._stf_dist_between_lateral_supp = None if main_dict['stiffener distance between lateral support'][0] == 0 \
else main_dict['stiffener distance between lateral support'][0]
self._girder_dist_between_lateral_supp = None if main_dict['girder distance between lateral support'][0] == 0 \
else main_dict['girder distance between lateral support'][0]
self._panel_length_Lp = None if main_dict['panel length, Lp'][0] == 0 else main_dict['panel length, Lp'][0]
self._overpressure_side = main_dict['pressure side'][0] # either 'stiffener side', 'plate side', 'both sides'
self._fab_method_stiffener = main_dict['fabrication method stiffener'][0]#'welded'
self._fab_method_girder = main_dict['fabrication method girder'][0]#'welded'
self._calculation_domain = main_dict['calculation domain'][0]
self._need_recalc = True
@property
def need_recalc(self):
return self._need_recalc
@need_recalc.setter
def need_recalc(self, val):
self._need_recalc = val
@property
def lat_press(self):
return self._lat_press
@lat_press.setter
def lat_press(self, val):
self._lat_press = val
@property
def Plate(self):
return self._Plate
@Plate.setter
def Plate(self, val):
self._Plate = val
@property
def Stiffener(self):
return self._Stiffener
@Stiffener.setter
def Stiffener(self, val):
self._Stiffener = val
@property
def Girder(self):
return self._Girder
@Girder.setter
def Girder(self, val):
self._Girder = val
@property
def overpressure_side(self):
return self._overpressure_side
@overpressure_side.setter
def overpressure_side(self, val):
self._overpressure_side = val
@property
def calculation_domain(self):
return self._calculation_domain
@calculation_domain.setter
def calculation_domain(self, val):
self._calculation_domain = val
def get_main_properties(self):
main_dict = dict()
main_dict['minimum pressure in adjacent spans'] = [self._min_lat_press_adj_span, '']
main_dict['material yield'] = [self._yield, 'Pa']
main_dict['load factor on stresses'] = [self._stress_load_factor, '']
main_dict['load factor on pressure'] = [self._lat_load_factor, '']
main_dict['buckling method'] = [self._method, '']
main_dict['stiffener end support'] = [self._stf_end_support, ''] # 'Continuous'
main_dict['girder end support'] = [self._girder_end_support, ''] # 'Continuous'
main_dict['tension field'] = [self._tension_field_action, ''] # 'not allowed'
main_dict['plate effective agains sigy'] = [self._stiffed_plate_effective_aginst_sigy, ''] # True
main_dict['buckling length factor stf'] = [self._buckling_length_factor_stf, '']
main_dict['buckling length factor girder'] = [self._buckling_length_factor_girder, '']
main_dict['km3'] = [self._km3, ''] # 12
main_dict['km2'] = [self._km2, ''] # 24
main_dict['girder distance between lateral support'] = [self._girder_dist_between_lateral_supp, '']
main_dict['stiffener distance between lateral support'] = [self._stf_dist_between_lateral_supp, '']
main_dict['panel length, Lp'] = [self._panel_length_Lp, '']
main_dict['pressure side'] = [self._overpressure_side, ''] # either 'stiffener', 'plate', 'both'
main_dict['fabrication method stiffener'] = [self._fab_method_stiffener, '']
main_dict['fabrication method girder'] = [self._fab_method_girder, '']
main_dict['calculation domain']= [self._calculation_domain, '']
return {'main dict': main_dict, 'Plate': self._Plate.get_structure_prop(),
'Stiffener': None if self._Stiffener is None else self._Stiffener.get_structure_prop(),
'Girder': None if self._Girder is None else self._Girder.get_structure_prop()}
def set_main_properties(self, prop_dict):
main_dict = prop_dict['main dict']
self._min_lat_press_adj_span = None if main_dict['minimum pressure in adjacent spans'][0] == 0 else \
main_dict['minimum pressure in adjacent spans'][0]
self._yield = main_dict['material yield'][0]
self._stress_load_factor = main_dict['load factor on stresses'][0]
self._lat_load_factor = main_dict['load factor on pressure'][0]
self._method = main_dict['buckling method'][0]
self._stf_end_support = main_dict['stiffener end support'][0]#'Continuous'
self._girder_end_support = main_dict['girder end support'][0]#'Continuous'
self._tension_field_action = main_dict['tension field'][0]# 'not allowed'
self._stiffed_plate_effective_aginst_sigy = main_dict['plate effective agains sigy'][0] #True
self._buckling_length_factor_stf = None if main_dict['buckling length factor stf'][0] == 0 else \
main_dict['buckling length factor stf'][0]
self._buckling_length_factor_girder = None if main_dict['buckling length factor girder'][0] == 0 else \
main_dict['buckling length factor girder'][0]
self._km3 = main_dict['km3'][0]#12
self._km2 = main_dict['km2'][0]#24
self._girder_dist_between_lateral_supp = None if main_dict['girder distance between lateral support'][0] in [0, None, ''] else \
main_dict['girder distance between lateral support'][0]
self._stf_dist_between_lateral_supp = None if main_dict['stiffener distance between lateral support'][0] in [0, None, ''] else \
main_dict['stiffener distance between lateral support'][0]
self._panel_length_Lp = None if main_dict['panel length, Lp'][0] == 0 else main_dict['panel length, Lp'][0]
self._overpressure_side = main_dict['pressure side'][0] # either 'stiffener', 'plate', 'both'
self._fab_method_stiffener = main_dict['fabrication method stiffener'][0]#'welded'
self._fab_method_girder = main_dict['fabrication method girder'][0]#'welded'
self._Plate.set_main_properties(prop_dict['Plate'])
if prop_dict['Stiffener'] is not None and self._Stiffener is None:
self._Stiffener = CalcScantlings(prop_dict['Stiffener'])
elif prop_dict['Stiffener'] is not None and self._Stiffener is not None:
self._Stiffener.set_main_properties(prop_dict['Stiffener'])
else:
self._Stiffener = None
if prop_dict['Girder'] is not None and self._Girder is None:
self._Girder = CalcScantlings(prop_dict['Girder'])
elif prop_dict['Girder'] is not None and self._Girder is not None:
self._Girder.set_main_properties(prop_dict['Girder'])
else:
self._Girder = None
self._calculation_domain = main_dict['calculation domain'][0]
def plate_buckling(self, optimizing = False):
'''
Summary
'''
return_dummy = {'Plate': {'Plate buckling': 0},
'Stiffener': {'Overpressure plate side': 0, 'Overpressure stiffener side': 0,
'Resistance between stiffeners': 0, 'Shear capacity': 0},
'Girder': {'Overpressure plate side': 0, 'Overpressure girder side': 0, 'Shear capacity': 0},
'Local buckling': 0}
unstf_pl = self.unstiffened_plate_buckling(optimizing = optimizing)
up_buckling = max([unstf_pl['UF Pnt. 5 Lateral loaded plates'], unstf_pl['UF sjsd'],
max([unstf_pl['UF Longitudinal stress'], unstf_pl['UF transverse stresses'],
unstf_pl['UF Shear stresses'], unstf_pl['UF Combined stresses']])
if all([self._Girder is None, self._Stiffener is None]) else 0])
if optimizing and up_buckling > 1:
return_dummy['Plate']['Plate buckling'] = up_buckling
return return_dummy
if not optimizing:
local_buckling = self.local_buckling()
if self._Stiffener is not None:
stf_pla = self.stiffened_panel(unstf_pl_data=unstf_pl, optimizing=optimizing)
if all([optimizing, type(stf_pla) == list]):
return_dummy['Stiffener'][stf_pla[0]] = stf_pla[1]
return return_dummy
stf_buckling_pl_side = stf_pla['UF Plate side'] if self._stf_end_support == 'Continuous' else \
stf_pla['UF simply supported plate side']
stf_buckling_stf_side = stf_pla['UF Stiffener side'] if self._stf_end_support == 'Continuous' else \
stf_pla['UF simply supported stf side']
stf_plate_resistance = stf_pla['UF Plate resistance']
stf_shear_capacity = stf_pla['UF Shear force']
else:
stf_buckling_pl_side, stf_buckling_stf_side, stf_plate_resistance, \
    stf_shear_capacity = 0, 0, 0, 0
if self._Girder is not None:
girder = self.girder(unstf_pl_data=unstf_pl, stf_pl_data=stf_pla, optmizing=optimizing)
if all([optimizing, type(girder) == list]):
return_dummy['Girder'][girder[0]] = girder[1]
return return_dummy
girder_buckling_pl_side = girder['UF Cont. plate side'] if self._girder_end_support == 'Continuous' else \
    girder['UF Simplified plate side']
girder_buckling_girder_side = girder['UF Cont. girder side'] if self._girder_end_support == 'Continuous' else \
    girder['UF Simplified girder side']
girder_shear_capacity = girder['UF shear force']
else:
girder_buckling_pl_side, girder_buckling_girder_side, girder_shear_capacity = 0,0,0
return {'Plate': {'Plate buckling': up_buckling},
'Stiffener': {'Overpressure plate side': stf_buckling_pl_side,
'Overpressure stiffener side': stf_buckling_stf_side,
'Resistance between stiffeners': stf_plate_resistance,
'Shear capacity': stf_shear_capacity},
'Girder': {'Overpressure plate side': girder_buckling_pl_side,
'Overpressure girder side': girder_buckling_girder_side,
'Shear capacity': girder_shear_capacity},
'Local buckling': 0 if optimizing else local_buckling}
def unstiffened_plate_buckling(self, optimizing = False):
unstf_pl_data = dict()
E = self._E/1e6
v = self._v
fy = self._yield/1e6
gammaM = self._Plate.get_mat_factor()
t = self._Plate.t
s = self._Plate.s
l = self._Plate.get_span()*1000
tsd = self._Plate.get_tau_xy()
psd = self._lat_press*self._lat_load_factor
sig_x1 = self._Plate.get_sigma_x1()*self._stress_load_factor
sig_x2 = self._Plate.get_sigma_x2()*self._stress_load_factor
sig_y1 = self._Plate.get_sigma_y1() * self._stress_load_factor
sig_y2 = self._Plate.get_sigma_y2() * self._stress_load_factor
if sig_x1 * sig_x2 >= 0:
Use_Smax_x = sxsd = sig_x1 if abs(sig_x1) > abs(sig_x2) else sig_x2
else:
Use_Smax_x = sxsd =max(sig_x1 , sig_x2)
if sig_y1 * sig_y2 >= 0:
Use_Smax_y = sy1sd = sig_y1 if abs(sig_y1) > abs(sig_y2) else sig_y2
else:
Use_Smax_y = sy1sd = max(sig_y1 , sig_y2)
if sig_x1 * sig_x2 >= 0:
Use_Smin_x = sig_x2 if abs(sig_x1) > abs(sig_x2) else sig_x1
else:
Use_Smin_x = min(sig_x1 , sig_x2)
if sig_y1 * sig_y2 >= 0:
Use_Smin_y = sig_y2 if abs(sig_y1) > abs(sig_y2) else sig_y1
else:
Use_Smin_y = min(sig_y1 , sig_y2)
shear_ratio_long = 1 if Use_Smax_x == 0 else Use_Smin_x / Use_Smax_x
shear_ratio_trans = 1 if Use_Smax_y == 0 else Use_Smin_y/Use_Smax_y
Max_vonMises_x = sig_x1 if abs(sig_x1) > abs(sig_x2) else sig_x2
unstf_pl_data['sxsd'] = sxsd
unstf_pl_data['sy1sd'] = sy1sd
l1 = min(l/4, s/2)
if l == 0:
sig_trans_l1 = Use_Smax_y
else:
sig_trans_l1 = Use_Smax_y*(shear_ratio_trans+(1-shear_ratio_trans)*(l-l1)/l)
trans_stress_used = sysd = 0.75*Use_Smax_y if abs(0.75*Use_Smax_y) > abs(Use_Smax_y) else sig_trans_l1
unstf_pl_data['sysd'] = sysd
#Pnt. 5 Lateral loaded plates
sjsd =math.sqrt(math.pow(Max_vonMises_x,2) + math.pow(sysd,2)-Max_vonMises_x*sysd+3*math.pow(tsd,2))
uf_sjsd = sjsd/fy*gammaM
unstf_pl_data['UF sjsd'] = uf_sjsd
#psi_x =max([0,(1-math.pow(sjsd/fy,2))/math.sqrt(1-3/4*math.pow(sysd/fy,2)-3*math.pow(tsd/fy,2))])
psi_x =max([0,(1-math.pow(sjsd/fy,2))/math.sqrt(1-3/4*math.pow(sysd/fy,2)-3*math.pow(tsd/fy,2))]) \
if 1-3/4*math.pow(sysd/fy,2)-3*math.pow(tsd/fy,2) > 0 else 0
psi_x_chk = (1-3/4*math.pow(sy1sd/fy,2)-3*math.pow(tsd/fy,2))>0
psi_y = max([0,(1-math.pow(sjsd/fy,2))/math.sqrt(1-3/4*math.pow(sxsd/fy,2)-3*math.pow(tsd/fy,2))]) \
if 1-3/4*math.pow(sxsd/fy,2)-3*math.pow(tsd/fy,2) > 0 else 0
psi_y_chk = (1 - 3 / 4 * math.pow(sxsd / fy, 2) - 3 * math.pow(tsd / fy, 2)) > 0
if gammaM * s * l == 0:
Psd_max_press = 0
else:
if all([psi_x_chk, psi_y_chk]):
Psd_max_press = (4 * fy / gammaM * math.pow(t / s,2) * (psi_y + math.pow(s / l, 2) * psi_x))
else:
Psd_max_press = -1
if Psd_max_press == 0:
uf_lat_load_pl_press = 0
else:
uf_lat_load_pl_press = 9 if Psd_max_press < 0 else abs(psd/Psd_max_press)
unstf_pl_data['UF Pnt. 5 Lateral loaded plates'] = uf_lat_load_pl_press
#6.2 & 6.6 Longitudinal stress
if shear_ratio_long <= -2:
    ksig = "Unknown"
elif 0 <= shear_ratio_long <= 1:
    ksig = 8.2 / (1.05 + shear_ratio_long)
elif -1 <= shear_ratio_long < 0:
    ksig = 7.81 - 6.29 * shear_ratio_long + 9.78 * math.pow(shear_ratio_long, 2)
else:  # -2 < shear_ratio_long < -1
    ksig = 5.98 * math.pow(1 - shear_ratio_long, 2)
#print(sxsd, sy1sd, tsd, sjsd, uf_lat_load_pl, psi_x, psi_y, uf_lat_load_pl_press, psd, Psd_max_press,ksig)
if t*E == 0:
alpha_p = 0
elif ksig == "Unknown":
alpha_p = 1.05*s/t*math.sqrt(fy/E)
else:
alpha_p = s/t/(28.4*math.sqrt(ksig*235/fy))
Cx =(alpha_p-0.055*(3+max([-2,shear_ratio_long])))/math.pow(alpha_p, 2)
sxRd = Cx*fy/gammaM if not all([sig_x1<0, sig_x2<0]) else 1*fy/gammaM # Corrected 07.08.2023, issue 126
uf_unstf_pl_long_stress = 0 if sxRd == 0 else abs(sxsd/sxRd)
unstf_pl_data['UF Longitudinal stress'] = uf_unstf_pl_long_stress
#print(uf_unstf_pl_long_stress)
#6.3 & 6.8 Transverse stresses:
ha = 0 if t == 0 else max([0,0.05*s/t-0.75])
kp_1_for_Psd = 0 if s == 0 else 2*math.pow(t/s,2)*fy
kp_used = 1-ha*(psd/fy-2*math.pow(t/s,2)) if psd>kp_1_for_Psd else 1
alpha_c = 0 if t*E == 0 else 1.1*s/t*math.sqrt(fy/E)
mu = 0.21*(alpha_c-0.2)
if alpha_c <= 0.2:
kappa = 1
elif 0.2 < alpha_c < 2:
kappa = 0 if alpha_c == 0 else 1/(2*math.pow(alpha_c,2))*(1+mu+math.pow(alpha_c,2)-
math.sqrt(math.pow(1+mu+math.pow(alpha_c,2),2)-
4*math.pow(alpha_c,2)))
elif alpha_c >= 2:
kappa = 0 if alpha_c == 0 else 1/(2*math.pow(alpha_c,2))+0.07
syR = 0 if l*fy == 0 else (1.3*t/l*math.sqrt(E/fy)+kappa*(1-1.3*t/l*math.sqrt(E/fy)))*fy*kp_used
syRd = syR if not all([sig_y1<0, sig_y2<0]) else fy
syRd = syRd/gammaM
uf_unstf_pl_trans_stress = 0 if syRd == 0 else abs(sysd)/syRd
#print(uf_unstf_pl_trans_stress)
unstf_pl_data['syR'] = syR
unstf_pl_data['syRd'] = syRd
unstf_pl_data['UF transverse stresses'] = uf_unstf_pl_trans_stress
#6.4 Shear stress
if l >= s:
kl = 0 if l == 0 else 5.34+4*math.pow(s/l,2)
else:
kl = 0 if l == 0 else 5.34*math.pow(s/l,2)+4
unstf_pl_data['kl'] = kl
alpha_w = 0 if t*E*kl == 0 else 0.795*s/t*math.sqrt(fy/E/kl)
if alpha_w <= 0.8:
Ctau = 1
elif 0.8 < alpha_w < 1.25:
Ctau = 1-0.675*(alpha_w-0.8)
else:
Ctau = 0 if alpha_w == 0 else 0.9/alpha_w
tauRd = Ctau*fy/gammaM/math.sqrt(3)
uf_unstf_pl_shear_stress = 0 if tauRd == 0 else tsd/tauRd
unstf_pl_data['UF Shear stresses'] = uf_unstf_pl_shear_stress
#print(uf_unstf_pl_shear_stress)
#6.5 Combined stresses
if alpha_w <= 0.8:
Ctaue = 1
elif 0.8 < alpha_w < 1.25:
Ctaue = 1-0.8*(alpha_w-0.8)
else:
Ctaue = 0 if alpha_w == 0 else 1/math.pow(alpha_w,2)
tauRd_comb = Ctaue*fy/gammaM/math.sqrt(3)
tauRd_comb = tauRd if sysd > 0 else tauRd  # note: both branches currently resolve to tauRd; the Ctaue value above is kept for reference
if s/t <= 120:
ci = 0 if t == 0 else 1-s/120/t
elif s/t > 120:
ci = 0
else:
ci = 1
sxRd_comb = fy/gammaM if all([sig_x1<0, sig_x2<0]) else sxRd
syRd_comb = syRd
sxsd_div_sxrd = 0 if sxRd_comb == 0 else sxsd/sxRd_comb
sysd_div_syrd = 0 if syRd_comb == 0 else sysd / syRd_comb
tausd_div_taurd = 0 if tauRd_comb == 0 else tsd/tauRd_comb
comb_req = math.pow(sxsd_div_sxrd, 2)+math.pow(sysd_div_syrd, 2)-ci*sxsd_div_sxrd*sysd_div_syrd+\
math.pow(tausd_div_taurd, 2)
uf_unstf_pl_comb_stress = comb_req
unstf_pl_data['UF Combined stresses'] = uf_unstf_pl_comb_stress
return unstf_pl_data
def stiffened_panel(self, unstf_pl_data = None, optimizing = False):
E = self._E / 1e6
v = self._v
G = E/(2*(1+v))
fy = self._yield/1e6
gammaM = self._Plate.get_mat_factor()
t = self._Plate.t
s = self._Plate.s
l = self._Plate.get_span() * 1000
sig_x1 = self._Plate.get_sigma_x1() * self._stress_load_factor
sig_x2 = self._Plate.get_sigma_x2() * self._stress_load_factor
sig_y1 = self._Plate.get_sigma_y1() * self._stress_load_factor
sig_y2 = self._Plate.get_sigma_y2() * self._stress_load_factor
Lg = self._Plate.get_lg()*1000
stf_pl_data = dict()
sxsd = 0 if self._method == 2 else unstf_pl_data['sxsd']
sy1sd = unstf_pl_data['sy1sd']
sysd = 0 if self._method == 2 else unstf_pl_data['sysd']
tsd = self._Plate.get_tau_xy() * self._stress_load_factor
psd = self._lat_press * self._lat_load_factor
psd_min_adj = psd if self._min_lat_press_adj_span is None else\
self._min_lat_press_adj_span*self._lat_load_factor
shear_ratio_long = 1
shear_ratio_trans = 1
#Pnt.7: Buckling of stiffened plates
Vsd = psd*s*l/2
tw_req = Vsd*gammaM*math.sqrt(3)/(fy*self._Stiffener.hw)
Anet = (self._Stiffener.hw + self._Stiffener.tf) * self._Stiffener.tw# + self._Stiffener.b*self._Stiffener.tf
Vrd = Anet*fy/(gammaM*math.sqrt(3))
Vsd_div_Vrd = Vsd/Vrd
stf_pl_data['UF Shear force'] = Vsd_div_Vrd
if optimizing and Vsd_div_Vrd > 1:
return ['UF Shear force', Vsd_div_Vrd]
# 7.2 Forces in idealised stiffened plate
Iy = Is = self._Stiffener.get_moment_of_intertia()*1000**4
stf_pl_data['Is'] = Is
kc = 0 if t*s == 0 else 2*(1+math.sqrt(1+10.9*Is/(math.pow(t,3)*s)))
mc = 13.3 if self._stf_end_support == 'Continuous' else 8.9
# 7.3 Effective plate width
syR = unstf_pl_data['syR']
Cys = 0.5*(math.sqrt(4-3*math.pow(sysd/fy,2))+sysd/fy)
alphap = 0 if t*E == 0 else 0.525 * (s / t) * math.sqrt(fy / E) # reduced plate slenderness, checked not calculated with ex
Cxs = (alphap - 0.22) / math.pow(alphap, 2) if alphap > 0.673 else 1
stf_pl_data['alphap'] = alphap
stf_pl_data['Cxs'] = Cxs
if sysd < 0:
Cys = min(Cys, 1)
else:
if s / t <= 120:
ci = 0 if t == 0 else 1-s / 120 / t
else:
ci = 0
cys_chk = 1 - math.pow(sysd / syR, 2) + ci * ((sxsd * sysd) / (Cxs * fy * syR))
Cys =0 if cys_chk < 0 else math.sqrt(cys_chk)
stf_pl_data['Cys_comp'] = Cys
se_div_s = Cxs * Cys
se = s * se_div_s
zp = self._Stiffener.get_cross_section_centroid_with_effective_plate(se=se/1000)*1000 - t / 2 # ch7.5.1 page 19
zt = (self._Stiffener.hw+self._Stiffener.tf) - zp + t/2
Iy = self._Stiffener.get_moment_of_intertia(efficent_se=se/1000)*1000**4
Weff = 0.0001 if zt == 0 else Iy/zt
Co= 0 if kc*E*t*s == 0 else Weff*fy*mc/(kc*E*math.pow(t,2)*s)
Po = 0 if all([sig_y1 < 0, sig_y2 < 0]) else (0.6+0.4*shear_ratio_trans)*Co*sy1sd \
if shear_ratio_trans >-1.5 else 0
qsd_press = (psd+abs(Po))*s
qsd_opposite = abs(Po)*s if psd < Po else 0
'''
1 Overpressure on Stiffener Side
2 Overpressure on Plate Side
3 Overpr. may occur on both sides
'''
qsd_plate_side = qsd_opposite if self._overpressure_side == 'stiffener side' else qsd_press
qsd_stf_side = qsd_opposite if self._overpressure_side == 'plate side' else qsd_press
kl = unstf_pl_data['kl']
tcrl = 0 if s == 0 else kl*0.904*E*math.pow(t/s,2)
if l<= Lg:
kg = 0 if Lg == 0 else 5.34+4*math.pow(l/Lg,2)
else:
kg = 0 if Lg == 0 else 5.34*math.pow(l / Lg, 2)+4
tcrg = 0 if l == 0 else kg*0.904*E*math.pow(t/l,2)
if self._tension_field_action == 'allowed' and tsd>(tcrl/gammaM):
ttf = tsd-tcrg
else:
ttf = 0
As = self._Stiffener.tw*self._Stiffener.hw + self._Stiffener.b*self._Stiffener.tf
NSd = sxsd*(As+s*t)+ttf*s*t
#7.4 Resistance of plate between stiffeners
ksp = math.sqrt(1-3*math.pow(tsd/fy,2)) if tsd < (fy/math.sqrt(3)) else 0
syrd_unstf = unstf_pl_data['syRd'] * ksp
tsd_7_4 = fy/(math.sqrt(3)*gammaM)
uf_stf_panel_res_bet_plate = max([sysd/syrd_unstf if all([syrd_unstf >0, sysd > 0]) else 0, tsd/tsd_7_4])
stf_pl_data['UF Plate resistance'] = uf_stf_panel_res_bet_plate
if optimizing and uf_stf_panel_res_bet_plate > 1:
return ['UF Plate resistance', uf_stf_panel_res_bet_plate]
#7.5 Characteristic buckling strength of stiffeners
fEpx = 0 if s == 0 else 3.62*E*math.pow(t/s,2) # eq 7.42, checked, ok
fEpy = 0 if s == 0 else 0.9*E*math.pow(t/s,2) # eq 7.43, checked, ok
fEpt = 0 if s == 0 else 5.0*E*math.pow(t/s,2) # eq 7.44, checked, ok
c = 0 if l == 0 else 2-(s/l) # eq 7.41, checked, ok
sjSd = math.sqrt(
math.pow(max([sxsd, 0]), 2) + math.pow(max([sysd, 0]), 2) - max([sxsd, 0]) * max([sysd, 0]) +
3 * math.pow(tsd, 2)) # eq 7.38, ok
alphae = math.sqrt( (fy/sjSd) * math.pow(math.pow(max([sxsd, 0])/fEpx, c)+
math.pow(max([sysd, 0])/fEpy, c)+
math.pow(abs(tsd)/fEpt, c), 1/c)) # eq 7.40
fep = fy / math.sqrt(1+math.pow(alphae,4)) # eq 7.39
eta = min(sjSd/fep, 1) # eq. 7.37
C = 0 if self._Stiffener.tw == 0 else (self._Stiffener.hw / s) * math.pow(t / self._Stiffener.tw, 3) * \
math.sqrt((1 - eta)) # e 7.36, checked ok
beta = (3*C+0.2)/(C+0.2) # eq 7.35
It = self._Stiffener.get_torsional_moment_venant(efficient_flange=False)
Ipo = self._Stiffener.get_polar_moment()
Iz = self._Stiffener.get_Iz_moment_of_inertia()
def red_prop():
tw_red =max(0,self._Stiffener.tw*(1-Vsd_div_Vrd))
Atot_red = As+se*t-self._Stiffener.hw*(self._Stiffener.tw - tw_red )
It_red = self._Stiffener.get_torsional_moment_venant(reduced_tw=tw_red, efficient_flange=False)
Ipo_red = self._Stiffener.get_polar_moment(reduced_tw=tw_red )
#Iz = self._Stiffener.get_Iz_moment_of_inertia(reduced_tw=tw)
#Iz_red = self._Stiffener.get_moment_of_intertia(efficent_se=se/1000, reduced_tw=tw_red)
Iy_red = self._Stiffener.get_moment_of_intertia(efficent_se=se / 1000, reduced_tw=tw_red) * 1000 ** 4
zp_red = self._Stiffener.get_cross_section_centroid_with_effective_plate(se / 1000, reduced_tw=tw_red ) \
* 1000 - t / 2 # ch7.5.1 page 19
zt_red = (self._Stiffener.hw + self._Stiffener.tf) - zp_red + t/2 # ch 7.5.1 page 19
Wes_red = 0.0001 if zt_red == 0 else Iy_red/zt_red
Wep_red = 0.0001 if zp_red == 0 else Iy_red/zp_red
return {'tw':tw_red , 'Atot': Atot_red , 'It': It_red , 'Ipo': Ipo_red , 'zp': zp_red ,
'zt': zt_red , 'Wes': Wes_red , 'Wep': Wep_red, 'Iy': Iy_red}
hs = self._Stiffener.hw / 2 if self._Stiffener.get_stiffener_type() == 'FB' else \
self._Stiffener.hw + self._Stiffener.tf / 2
def lt_params(lT):
if Ipo*lT>0:
fET = beta*G*It/Ipo+math.pow(math.pi,2)*E*math.pow(hs,2)*Iz/(Ipo*math.pow(lT,2)) #NOTE, beta was missed from above, added. 23.08.2022
else:
fET = 0.001
alphaT = 0 if fET == 0 else math.sqrt(fy/fET)
mu = 0.35*(alphaT-0.6)
fT_div_fy = (1+mu+math.pow(alphaT,2)-math.sqrt(math.pow(1+mu+math.pow(alphaT,2),2)-
4*math.pow(alphaT,2)))/(2*math.pow(alphaT,2))
fT = fy*fT_div_fy if alphaT > 0.6 else fy
#print(fET, alphaT, mu, fT)
return {'fEt': fET, 'alphaT': alphaT, 'mu': mu, 'fT_div_fy': fT_div_fy, 'fT': fT}
zp = self._Stiffener.get_cross_section_centroid_with_effective_plate(se/1000)*1000 - t / 2 # ch7.5.1 page 19
zt = (self._Stiffener.hw + self._Stiffener.tf) - zp + t/2 # ch 7.5.1 page 19
fr = fy
if Vsd_div_Vrd < 0.5:
Wes = 0.0001 if zt == 0 else Iy/zt
Wep = 0.0001 if zp == 0 else Iy/zp
Ae = As + se * t
else:
red_param = red_prop()
Wes = red_param['Wes']
Wep = red_param['Wep']
Ae = red_param['Atot']
Wmin = min([Wes, Wep])
pf = 0.0001 if l*s*gammaM == 0 else 12*Wmin*fy/(math.pow(l,2)*s*gammaM)
if self._buckling_length_factor_stf is None:
if self._stf_end_support == 'Continuous':
lk = l*(1-0.5*abs(psd_min_adj/pf))
else:
lk = l
else:
lk = self._buckling_length_factor_stf * l
ie = 0.0001 if As+se*t == 0 else math.sqrt(Iy/(As+se*t))
fE = 0.0001 if lk == 0 else math.pow(math.pi,2)*E*math.pow(ie/lk,2)
fk_dict = dict()
fr_dict = dict()
#Plate side
zp = zp
fr = fy
fr_dict['plate'] = fr
alpha = math.sqrt(fr/fE)
mu = 0 if ie == 0 else (0.34+0.08*zp/ie)*(alpha-0.2)
fk_div_fr = (1+mu+math.pow(alpha,2)-math.sqrt(math.pow(1+mu+math.pow(alpha,2),2)-4*math.pow(alpha,2)))/(2*math.pow(alpha,2))
fk = fk_div_fr*fr if alpha > 0.2 else fr
fk_dict['plate'] = fk
#Stiffener side
for lT in [int(l if self._stf_dist_between_lateral_supp is None else self._stf_dist_between_lateral_supp),
int(0.4*l if self._stf_dist_between_lateral_supp is None else self._stf_dist_between_lateral_supp),
int(0.8*l if self._stf_dist_between_lateral_supp is None else self._stf_dist_between_lateral_supp)]:
params = lt_params(lT)
fr = params['fT'] if params['alphaT']>0.6 else fy
fr_dict[lT] = fr
alpha = math.sqrt(fr / fE)
mu = 0 if ie == 0 else (0.34 + 0.08 * zt / ie) * (alpha - 0.2)
fk_div_fr = (1 + mu + math.pow(alpha, 2) - math.sqrt(
math.pow(1 + mu + math.pow(alpha, 2), 2) - 4 * math.pow(alpha, 2))) / (2 * math.pow(alpha, 2))
fk = fk_div_fr * fr if alpha > 0.2 else fr
fk_dict[lT] = fk
#7.7.3 Resistance parameters for stiffeners
NRd = 0.0001 if gammaM == 0 else Ae * (fy / gammaM) # eq7.65, checked ok
NksRd = Ae * (fk_dict[int(l if self._stf_dist_between_lateral_supp is None else self._stf_dist_between_lateral_supp)] / gammaM) #eq7.66
NkpRd = Ae * (fk_dict['plate'] / gammaM) # checked ok
Ms1Rd = Wes * (fr_dict[int(0.4*l if self._stf_dist_between_lateral_supp is None else
self._stf_dist_between_lateral_supp)] / gammaM) # ok
Ms2Rd = Wes * (fr_dict[int(0.8*l if self._stf_dist_between_lateral_supp is None else
self._stf_dist_between_lateral_supp)] / gammaM) # eq7.69 checked ok
MstRd = Wes*(fy/gammaM) #eq7.70 checked ok
MpRd = Wep*(fy/gammaM) #eq7.71 checked ok
Ne = ((math.pow(math.pi,2))*E*Ae)/(math.pow(lk/ie,2))# eq7.72 , checked ok
#7.6 Resistance of stiffened panels to shear stresses
Ip = math.pow(t,3)*s/10.9
tcrs = (36 * E / (s * t * math.pow(l, 2))) * ((Ip * math.pow(Is, 3)) ** 0.25)
tRdy = fy/math.sqrt(3)/gammaM
tRdl = tcrl/gammaM
tRds = tcrs/gammaM
tRd = min([tRdy,tRdl,tRds])
u = 0 if all([tsd>(tcrl/gammaM), self._tension_field_action == 'allowed']) else math.pow(tsd/tRd, 2)
zstar = zp
if self._stf_end_support != 'Continuous':
#Lateral pressure on plate side:
#7.7.2 Simple supported stiffener (sniped stiffeners)
#Lateral pressure on plate side:
stf_pl_data['UF Stiffener side'] = 0
stf_pl_data['UF Plate side'] = 0
uf_7_58 = NSd/NksRd-2*NSd/NRd +((qsd_plate_side*math.pow(l,2)/8)+NSd*zstar)/(MstRd*(1-NSd/Ne))+u
uf_7_59 = NSd/NkpRd+((qsd_plate_side*math.pow(l,2)/8)+NSd*zstar)/(MpRd*(1-NSd/Ne))+u
uf_max_simp_pl = max([uf_7_58, uf_7_59])
stf_pl_data['UF simply supported plate side'] = uf_max_simp_pl
#Lateral pressure on stiffener side:
uf_7_60 = NSd/NksRd+((qsd_stf_side*math.pow(l,2)/8)-NSd*zstar)/(Ms2Rd*(1-NSd/Ne))+u
uf_7_61 = NSd/NkpRd-2*NSd/NRd+((qsd_stf_side*math.pow(l,2)/8)-NSd*zstar)/(MpRd*(1-NSd/Ne))+u
test_qsd_l = qsd_stf_side*math.pow(l,2)/8 >= NSd*zstar
uf_7_62 = NSd/NksRd-2*NSd/NRd+(NSd*zstar-(qsd_stf_side*math.pow(l,2)/8))/(MstRd*(1-NSd/Ne))+u
uf_7_63 = NSd/NkpRd+(NSd*zstar-(qsd_stf_side*math.pow(l,2)/8))/(MpRd*(1-NSd/Ne))+u
uf_max_simp_stf = max([0,uf_7_62,uf_7_63]) if not test_qsd_l else max([0,uf_7_60,uf_7_61])
stf_pl_data['UF simply supported stf side'] = uf_max_simp_stf
else:
stf_pl_data['UF simply supported stf side'] = 0
stf_pl_data['UF simply supported plate side'] = 0
#7.7.1 Continuous stiffeners
M1Sd_pl = abs(qsd_plate_side)*math.pow(l,2)/self._km3
M2Sd_pl = abs(qsd_plate_side)*math.pow(l,2)/self._km2
M1Sd_stf = abs(qsd_stf_side) * math.pow(l, 2) / self._km3
M2Sd_stf = abs(qsd_stf_side) * math.pow(l, 2) / self._km2
M1Sd_max = max([M1Sd_pl, M1Sd_stf])
M2Sd_max = max([M2Sd_pl, M2Sd_stf])
# Lateral pressure on plate side:
#print(M1Sd_pl, M2Sd_pl, M1Sd_stf,M2Sd_stf, qsd_stf_side, qsd_plate_side)
from scipy.optimize import minimize
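# The z* (zstar) optimisation below searches, within the physical range [-zt + tf/2, zp],
# for the neutral-axis shift that minimises the largest of the interaction equations
# 7.50-7.53 (lateral pressure on the plate side) and 7.54-7.57 (pressure on the stiffener side).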
def iteration_min_uf_pl_side(x):
eq7_50 = NSd/NksRd+(M1Sd_pl-NSd*x)/(Ms1Rd*(1-NSd/Ne))+u
eq7_51 = NSd/NkpRd-2*NSd/NRd +(M1Sd_pl-NSd*x)/(MpRd*(1-NSd/Ne))+u
eq7_52 = NSd/NksRd-2*NSd/NRd+(M2Sd_pl+NSd*x)/(MstRd*(1-NSd/Ne))+u
eq7_53 = NSd/NkpRd+(M2Sd_pl+NSd*x)/(MpRd*(1-NSd/Ne))+u
#print(zstar, eq7_50, eq7_51,eq7_52,eq7_53,max([eq7_50, eq7_51,eq7_52,eq7_53]))
return max(eq7_50, eq7_51, eq7_52, eq7_53)
res_iter_pl = minimize(iteration_min_uf_pl_side, 0, bounds=[[-zt+self._Stiffener.tf/2,zp]])
if type(res_iter_pl.fun) == list:
stf_pl_data['UF Plate side'] = res_iter_pl.fun[0]
else:
stf_pl_data['UF Plate side'] = res_iter_pl.fun
# Lateral pressure on stiffener side:
# max_lfs = []
# ufs = []
def iteration_min_uf_stf_side(x):
eq7_54 = NSd/NksRd-2*NSd/NRd +(M1Sd_stf+NSd*x)/(MstRd*(1-NSd/Ne))+u
eq7_55 = NSd/NkpRd+(M1Sd_stf+NSd*x)/(MpRd*(1-NSd/Ne))+u
eq7_56 = NSd/NksRd+(M2Sd_stf-NSd*x)/(Ms2Rd*(1-NSd/Ne))+u
eq7_57 = NSd/NkpRd-2*NSd/NRd+(M2Sd_stf-NSd*x)/(MpRd*(1-NSd/Ne))+u
return max(eq7_54, eq7_55, eq7_56, eq7_57)
res_iter_stf = minimize(iteration_min_uf_stf_side, 0, bounds=[[-zt+self._Stiffener.tf/2,zp]])
if type(res_iter_stf.fun) == list:
stf_pl_data['UF Stiffener side'] = res_iter_stf.fun[0]
else:
stf_pl_data['UF Stiffener side'] = res_iter_stf.fun
return stf_pl_data
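    # The girder check below is fed by the dictionaries produced by the preceding routines: the keys
    # it reads from unstf_pl_data are 'sxsd', 'sysd' and 'sy1sd', and from stf_pl_data it reads
    # 'Is', 'alphap', 'Cxs' and 'Cys_comp'. A caller supplying its own dictionaries (e.g. during
    # optimization) only needs to provide those keys.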
def girder(self, unstf_pl_data = None, stf_pl_data = None, optmizing = False):
'''
Buckling of girder.
'''
girder_data = dict()
E = self._E / 1e6
v = self._v
G = E/(2*(1+v))
fy = self._yield/1e6
gammaM = self._Plate.get_mat_factor()
t = self._Plate.t
s = self._Plate.s
l = self._Plate.get_span() * 1000
hw = self._Girder.hw
tsd = self._Plate.get_tau_xy() * self._stress_load_factor
psd = self._lat_press
sig_x1 = self._Plate.get_sigma_x1() * self._stress_load_factor
sig_x2 = self._Plate.get_sigma_x2() * self._stress_load_factor
sig_y1 = self._Plate.get_sigma_y1() * self._stress_load_factor
sig_y2 = self._Plate.get_sigma_y2() * self._stress_load_factor
sxsd = 0 if self._method == 2 else unstf_pl_data['sxsd']
sy1sd = unstf_pl_data['sy1sd']
sysd = 0 if self._method == 2 else unstf_pl_data['sysd']
tsd = self._Plate.get_tau_xy() * self._stress_load_factor
psd = self._lat_press * self._lat_load_factor
psd_min_adj = psd if self._min_lat_press_adj_span is None else\
self._min_lat_press_adj_span*self._lat_load_factor
Lg = self._Plate.get_lg()*1000
Ltg = Lg if self._girder_dist_between_lateral_supp == None else self._girder_dist_between_lateral_supp
Lp = 0 if self._panel_length_Lp is None else self._panel_length_Lp
#Pnt.8: Buckling of Girders
#7.8 Check for shear force
Vsd = psd*l*Lg/2
tw_req = Vsd*gammaM*math.sqrt(3)/(fy*self._Girder.hw)
Anet = self._Girder.hw * self._Girder.tw + self._Girder.tw*self._Girder.tf
Vrd = Anet*fy/(gammaM*math.sqrt(3))
Vsd_div_Vrd = Vsd/Vrd
girder_data['UF shear force'] = Vsd_div_Vrd
if optmizing and Vsd_div_Vrd > 1:
return ['UF shear force', Vsd_div_Vrd]
CHK_account_for_interaction = Vsd < 0.5*Vrd
#8.2 Girder forces
As = self._Stiffener.tw*self._Stiffener.hw + self._Stiffener.b*self._Stiffener.tf
Ag = self._Girder.tw*self._Girder.hw + self._Girder.b*self._Girder.tf
sysd = 0 if self._method == 2 else unstf_pl_data['sysd']
NySd = sysd*(Ag+l*t)
Is = stf_pl_data['Is']
tcel = 18*E/(t*math.pow(l,2))*math.pow(t*Is/s, 0.75)
tceg = 0 if Lp == 0 else tcel*math.pow(l,2)/math.pow(Lp,2)
alpha_t1 = 0 if Lp == 0 else math.sqrt(0.6*fy/tceg)
alpha_t2 = math.sqrt(0.6*fy/tcel)
tcrg = 0.6*fy/math.pow(alpha_t1,2) if alpha_t1 > 1 else 0.6*fy
tcrl = 0.6*fy/math.pow(alpha_t2,2) if alpha_t2 > 1 else 0.6*fy
tcrg = tcrg if self._stf_end_support == 'Continuous' else 0
#8.4 Effective width of girders
#Method 1:
alphap = stf_pl_data['alphap']
Cxs = stf_pl_data['Cxs']
fkx = Cxs*fy
CxG = math.sqrt(1-math.pow(sxsd/fkx,2)) if sxsd<fkx else 0
if 4-math.pow(Lg/l,2) != 0:
CyG_tens = 1 if Lg > 2*l else Lg/(l*math.sqrt(4-math.pow(Lg/l,2)))
else:
CyG_tens = 1
CyG_comp = 0 if l*alphap == 0 else stf_pl_data['Cys_comp']
CyG = min([1,CyG_tens]) if sy1sd<0 else min([1, CyG_comp])
CtG = math.sqrt(1-3*math.pow(tsd/fy,2)) if tsd<fy/math.sqrt(3) else 0
le_div_l_method1 = CxG*CyG*CtG
le_method1 = l*le_div_l_method1
lim_sniped_or_cont = 0.3*Lg if self._girder_end_support == 'Continuous' else 0.4*Lg
tot_min_lim = min([le_method1, lim_sniped_or_cont])
#Method 2:
CxG = math.sqrt(1-math.pow(sxsd/fy,2))
alphaG = 0 if E*t == 0 else 0.525*l/t*math.sqrt(fy/E)
CyG = (alphaG-0.22)/math.pow(alphaG,2) if alphaG>0.673 else 1
CtG = math.sqrt(1-3*math.pow(tsd/fy,2)) if tsd<fy/math.sqrt(3) else 0
le_div_l_method2 = CxG*CyG*CtG
le_method2 = le_div_l_method2*l
eff_width_sec_mod = tot_min_lim if self._stiffed_plate_effective_aginst_sigy else le_method2
eff_width_other_calc = le_method1 if self._stiffed_plate_effective_aginst_sigy else le_method2
le = eff_width_other_calc
AtotG = Ag + le * t
Iy = self._Girder.get_moment_of_intertia(efficent_se=le / 1000) * 1000 ** 4
zp = self._Girder.get_cross_section_centroid_with_effective_plate(le / 1000) * 1000 - t / 2 # ch7.5.1 page 19
zt = (t / 2 + self._Girder.hw + self._Girder.tf) - zp # ch 7.5.1 page 19
#
def red_prop():
twG =max(0,self._Girder.tw*(1-Vsd_div_Vrd))
le = eff_width_other_calc
AtotG = Ag+le*t-self._Girder.hw*(self._Girder.tw - twG)
Ipo = self._Girder.get_polar_moment(reduced_tw=twG)
IzG = self._Girder.get_Iz_moment_of_inertia(reduced_tw=twG)
Iy = self._Girder.get_moment_of_intertia(efficent_se=le/1000, reduced_tw=twG)*1000**4
zp = self._Girder.get_cross_section_centroid_with_effective_plate(le / 1000, reduced_tw=twG) * 1000 - t / 2 # ch7.5.1 page 19
zt = (t / 2 + self._Girder.hw + self._Girder.tf) - zp # ch 7.5.1 page 19
WeG = 0.0001 if zt == 0 else Iy / zt
Wep = 0.0001 if zp == 0 else Iy / zp
#print('In reduced', 'zp',zp,'zt',zt,'WeG',WeG,'Wep',Wep, 'Iy', Iy)
return {'tw':twG, 'Atot': AtotG, 'Ipo': Ipo, 'IzG': IzG, 'zp': zp, 'zt': zt, 'WeG': WeG, 'Wep': Wep}
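        # red_prop() above recomputes the girder section with the web thinned to tw*(1 - Vsd/Vrd);
        # it is only invoked in the branch below where Vsd/Vrd >= 0.5, i.e. when the shear force is
        # large enough that shear/bending interaction has to be accounted for (cf. the
        # CHK_account_for_interaction flag computed after the shear check).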
if Vsd_div_Vrd < 0.5:
WeG = 0.0001 if zt == 0 else Iy/zt
Wep = 0.0001 if zp == 0 else Iy/zp
AeG = Ag+eff_width_other_calc*t
else:
red_param = red_prop()
WeG = red_param['WeG']
Wep = red_param['Wep']
AeG = red_param['Atot']
# #from: 7.7.3 Resistance parameters for stiffeners
Wmin = min([WeG, Wep])
pf = 0.0001 if l*s*gammaM == 0 else 12*Wmin*fy/(math.pow(l,2)*s*gammaM)
lk = Lg
LGk = lk if self._buckling_length_factor_girder is None else lk*self._buckling_length_factor_girder
#ie = 0 if Vsd_div_Vrd<0.5 else math.sqrt(Iy/AtotG)
ie = math.sqrt(Iy / AtotG)
fE = 0 if LGk == 0 else math.pow(math.pi,2)*E*math.pow(ie/LGk,2)
# 8.2 Girder forces, cont
alphaG = 0 if fE == 0 else math.sqrt(fy/fE)
Q = 0 if alphaG-0.2<0 else min([1, alphaG-0.2])
C_for_tsd_trg = Q*(7-5*math.pow(s/l,2))*math.pow((tsd-tcrg)/tcrl,2)
C = C_for_tsd_trg if tsd>tcrg else 0
p0lim = 0.02*(t+As/s)/l*(sxsd+C*tsd)
p0calc = 0 if s*self._Girder.hw*Lg*E*l==0 else 0.4*(t+As/s)/(self._Girder.hw*(1-s/Lg))*fy/E*math.pow(Lg/l,2)\
*(sxsd+C*tsd)
p0_compression = max([p0lim,p0calc])
        p0_tension = 0 if s*Lg*self._Girder.hw*E*l==0 else 0.4*(t+As/s)/(self._Girder.hw*(1-s/Lg))*fy/E\
                     *math.pow(Lg/l,2)*(C*tsd)  # mirrors p0calc above with the sxsd term dropped (tension in the plate)
p0 = p0_tension if sxsd<0 else p0_compression
qSd_pressure = (psd+p0_tension)*l if sxsd<0 else (psd+p0_compression)*l
qsd_oppsite = p0*l if psd<p0 else 0
qSd_plate_side = qsd_oppsite if self._overpressure_side == 'stiffener side' else qSd_pressure
qSd_girder_side = qsd_oppsite if self._overpressure_side == 'plate side' else qSd_pressure
#8.5 Torsional buckling of girders
Af = self._Girder.tf*self._Girder.b
Aw = self._Girder.hw*self._Girder.tw
Iz = self._Girder.get_Iz_moment_of_inertia()
b = max([self._Girder.b, self._Girder.tw])
C = 0.55 if self._Girder.get_stiffener_type() in ['T', 'FB'] else 1.1
        LGT0 = b*C*math.sqrt(E*Af/(fy*(Af+Aw/3))) #TODO: could add an automatic check/message for whether torsional buckling must be considered
girder_data['Torsional buckling'] = 'Torsional buckling to be considered' if Ltg >LGT0 else \
"Torsional buckling need not to be considered"
def lt_params(LTG):
fETG = math.pow(math.pi, 2)*E*Iz/((Af+Aw/3)*math.pow(LTG, 2))
alphaTG = math.sqrt(fy/fETG)
mu = 0.35*(alphaTG-0.6)
fT_div_fy = (1 + mu + math.pow(alphaTG, 2) - math.sqrt(
math.pow(1 + mu + math.pow(alphaTG, 2), 2) - 4 * math.pow(alphaTG, 2))) / (2 * math.pow(alphaTG, 2))
fT = fT_div_fy*fy if alphaTG>0.6 else fy
return {'fETG': fETG, 'alphaT': alphaTG, 'mu': mu, 'fT_div_fy': fT_div_fy, 'fT': fT}
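        # lt_params() mirrors the lateral-torsional reduction used for the stiffeners earlier in this
        # class: fETG is the elastic torsional buckling stress for the span LTG, alphaTG = sqrt(fy/fETG),
        # and fT is reduced below fy only when alphaTG exceeds 0.6 (otherwise full yield is used).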
fk_dict = dict()
fr_dict = dict()
for lT in ['plate', Ltg, 0.4*Lg, 0.8*Lg]:
if lT != 'plate':
params = lt_params(lT)
fr = params['fT'] if params['alphaT']>0.6 else fy
alpha = math.sqrt(fr / fE)
mu = 0 if ie == 0 else (0.34 + 0.08 * zt / ie) * (alpha - 0.2)
else:
fr = fy
alpha = math.sqrt(fr / fE)
mu = 0 if ie == 0 else (0.34 + 0.08 * zp / ie) * (alpha - 0.2)
fr_dict[lT] = fr
fk_div_fr = (1 + mu + math.pow(alpha, 2) - math.sqrt(
math.pow(1 + mu + math.pow(alpha, 2), 2) - 4 * math.pow(alpha, 2))) / (2 * math.pow(alpha, 2))
fk = fk_div_fr * fr if alpha > 0.2 else fr
fk_dict[lT] = fk
# #7.7.3 Resistance parameters for stiffeners
NRd = 0.0001 if gammaM == 0 else AeG * (fy / gammaM) # eq7.65, checked ok
NksRd = AeG * (fk_dict[Ltg] / gammaM) #eq7.66
NkpRd = AeG * (fk_dict['plate'] / gammaM) # checked ok
MsRd = WeG*fr_dict[Ltg]/gammaM
Ms1Rd = WeG * (fr_dict[0.4*Lg] / gammaM) # ok
Ms2Rd = WeG * (fr_dict[0.8*Lg] / gammaM) # eq7.69 checked ok
MstRd = WeG*(fy/gammaM) #eq7.70 checked ok
MpRd = Wep*(fy/gammaM) #eq7.71 checked ok
NE = ((math.pow(math.pi,2))*E*AeG)/(math.pow(LGk/ie,2))# eq7.72 , checked ok
# print(fr_dict)
# print(fk_dict)
# print('WeG', WeG, 'Wep', Wep)
# print('NRd',NRd, 'NksRd',NksRd, 'NkpRd',NkpRd,'MsRd', MsRd,'MstRd', MstRd, 'Ms1Rd', Ms1Rd, 'Ms2Rd', Ms2Rd, 'MstRd', MstRd, 'MpRd', MpRd, 'Ne', Ne)
#7.7 Interaction formulas for axial compression and lateral pressure
#7.7.2 Simple supported girder (sniped girders)
if self._girder_end_support != 'Continuous':
u = 0
zstar = zp
girder_data['UF Cont. plate side'] = 0
girder_data['UF Cont. girder side'] = 0
# Lateral pressure on plate side:
uf_7_58 = NySd/NksRd-2*NySd/NRd +((qSd_plate_side*math.pow(Lg, 2)/8)+NySd*zstar)/(MstRd*(1-NySd/NE))+u
uf_7_59 = NySd/NkpRd+((qSd_plate_side*math.pow(Lg, 2)/8)+NySd*zstar)/(MpRd*(1-NySd/NE))+u
max_uf_simp_plate = max([0,uf_7_58,uf_7_59])
girder_data['UF Simplified plate side'] = max_uf_simp_plate
#Lateral pressure on girder side:
uf_7_60 = NySd/NksRd+((qSd_girder_side*math.pow(Lg, 2)/8)-NySd*zstar)/(Ms2Rd*(1-NySd/NE))+u
uf_7_61 = NySd/NkpRd-2*NySd/NRd+((qSd_girder_side*math.pow(Lg, 2)/8)-NySd*zstar)/(MpRd*(1-NySd/NE))+u
CHK_qSd_NSd = qSd_girder_side*math.pow(Lg, 2)/8 < NySd*zstar
uf_7_62 = NySd/NksRd-2*NySd/NRd+(NySd*zstar-(qSd_girder_side*math.pow(Lg, 2)/8))/(MstRd*(1-NySd/NE))+u
uf_7_63 = NySd/NkpRd+(NySd*zstar-(qSd_girder_side*math.pow(Lg, 2)/8))/(MpRd*(1-NySd/NE))+u
max_uf_simp_stiffener = max([0,uf_7_60,uf_7_61]) if CHK_qSd_NSd else max([0,uf_7_60,uf_7_61, uf_7_62,uf_7_63])
girder_data['UF Simplified girder side'] = max_uf_simp_stiffener
else:
u = 0
girder_data['UF Simplified girder side'] = 0
girder_data['UF Simplified plate side'] = 0
#7.7.1 Continuous stiffeners
M1Sd_pl = abs(qSd_plate_side)*math.pow(Lg, 2)/12
M2Sd_pl = abs(qSd_plate_side)*math.pow(Lg, 2)/24
M1Sd_stf = abs(qSd_girder_side)*math.pow(Lg, 2)/12
M2Sd_stf = abs(qSd_girder_side)*math.pow(Lg, 2)/24
            from scipy.optimize import minimize  # local import, as in the stiffener routine above
            # Lateral pressure on plate side:
def iter_plate(zstar):
uf_7_48 = NySd/NksRd+(M1Sd_pl-NySd*zstar)/(Ms1Rd*(1-NySd/NE))+u
uf_7_49 = NySd/NkpRd-2*NySd/NRd +(M1Sd_pl-NySd*zstar)/(MpRd*(1-NySd/NE))+u
uf_7_50 = NySd/NksRd-2*NySd/NRd+(M2Sd_pl+NySd*zstar)/(MstRd*(1-NySd/NE))+u
uf_7_51 = NySd/NkpRd+(M2Sd_pl+NySd*zstar)/(MpRd*(1-NySd/NE))+u
return max([uf_7_48, uf_7_49, uf_7_50, uf_7_51])
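            # Both continuous checks treat the neutral-axis shift zstar as a free parameter inside
            # [-zt + tf/2, zp] and take the governing utilisation as the minimum over zstar of the
            # worst of the four interaction equations; scipy.optimize.minimize performs that
            # one-dimensional search for the plate side here and for the girder side below.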
res_iter_pl = minimize(iter_plate, 0, bounds=[[-zt + self._Girder.tf / 2, zp]])
if type(res_iter_pl.fun) == list:
girder_data['UF Cont. plate side'] = res_iter_pl.fun[0]
else:
girder_data['UF Cont. plate side'] = res_iter_pl.fun
# Lateral pressure on girder side:
def iter_girder(zstar):
uf_7_52 = NySd/NksRd-2*NySd/NRd +(M1Sd_stf +NySd*zstar)/(MstRd*(1-NySd/NE))+u
uf_7_53 = NySd/NkpRd+(M1Sd_stf +NySd*zstar)/(MpRd*(1-NySd/NE))+u
uf_7_54 = NySd/NksRd+(M2Sd_stf-NySd*zstar)/(Ms2Rd*(1-NySd/NE))+u
uf_7_55 = NySd/NkpRd-2*NySd/NRd+(M2Sd_stf-NySd*zstar)/(MpRd*(1-NySd/NE))+u
return max([uf_7_52, uf_7_53 ,uf_7_54 ,uf_7_55])
res_iter_girder = minimize(iter_girder, 0, bounds=[[-zt + self._Girder.tf / 2, zp]])
if type(res_iter_girder.fun) == list:
girder_data['UF Cont. girder side'] = res_iter_girder.fun[0]
else:
girder_data['UF Cont. girder side'] = res_iter_girder.fun
return girder_data
def local_buckling(self, optimizing = False):
'''
Checks for girders and stiffeners
'''
        fy = self._yield / 1e6  # MPa, consistent with the 235/fy ratios below and the other checks in this class
if self._Stiffener is not None:
max_web_stf = 42*self._Stiffener.tw*math.sqrt(235/fy) if self._Stiffener.get_stiffener_type() != 'FB' else 0
max_flange_stf = (14 if self._fab_method_stiffener == 'welded' else 15) * self._Stiffener.tf *math.sqrt(235/fy)
else:
max_web_stf = 0
max_flange_stf = 0
if self._Girder is not None:
max_web_girder = 42*self._Girder.tw*math.sqrt(235/fy) if self._Girder.get_stiffener_type() != 'FB' else 0
max_flange_girder = (14 if self._fab_method_girder == 'welded' else 15) * self._Girder.tf *math.sqrt(235/fy)
else:
max_web_girder = 0
max_flange_girder = 0
return {'Stiffener': [max_web_stf, max_flange_stf], 'Girder': [max_web_girder, max_flange_girder]}
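        # Illustrative numbers (hypothetical, fy = 355 MPa): a welded stiffener web with tw = 12 mm
        # may be at most 42*12*sqrt(235/355) ~ 410 mm deep, and a welded flange with tf = 15 mm at
        # most 14*15*sqrt(235/355) ~ 171 mm wide, before local buckling of the profile governs.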
# def get_tuple(self):
# ''' Return a tuple of the plate stiffener'''
# return (self.Plate.get_s(), self.Plate.get_pl_thk(), self.Stiffener.get_web_thk(), self.Stiffener.get_web_thk(),
# self.Stiffener.get_fl_w(), self.Stiffener.get_fl_thk(), self.Plate.get_span(), self.Plate.get_lg(),
# self.Stiffener.get_stiffener_type())
def get_one_line_string_mixed(self):
''' Returning a one line string. '''
return 'pl_'+str(round(self.Plate.s, 1))+'x'+str(round(self.Plate.t,1))+' stf_'+\
self.Stiffener.get_stiffener_type()+\
str(round(self.Stiffener.hw,1))+'x'+str(round(self.Stiffener.tw,1))+'+'\
+str(round(self.Stiffener.b,1))+'x'+\
str(round(self.Stiffener.tf,1))
def get_extended_string_mixed(self):
''' Some more information returned. '''
return 'span: '+str(round(self.Plate.get_span(),4))+' structure type: '+ self.Stiffener.get_structure_type() + ' stf. type: ' + \
self.Stiffener.get_stiffener_type() + ' pressure side: ' + self.Plate.overpressure_side
class Shell():
'''
Small class to contain shell properties.
'''
def __init__(self, main_dict):
super(Shell, self).__init__()
'''
shell_dict = {'plate_thk': [self._new_shell_thk.get() / 1000, 'm'],
'radius': [self._new_shell_radius.get() / 1000, 'm'],
'distance between rings, l': [self._new_shell_dist_rings.get() / 1000, 'm'],
'length of shell, L': [self._new_shell_length.get() / 1000, 'm'],
'tot cyl length, Lc': [self._new_shell_tot_length.get() / 1000, 'm'],
'eff. buckling lenght factor': [self._new_shell_k_factor.get() / 1000, 'm'],
'mat_yield': [self._new_shell_yield.get() *1e6, 'Pa']}
'''
self._thk = main_dict['plate_thk'][0]
self._yield = main_dict['mat_yield'][0]
self._radius = main_dict['radius'][0]
self._dist_between_rings = main_dict['distance between rings, l'][0]
self._length_of_shell = main_dict['length of shell, L'][0]
self._tot_cyl_length = main_dict['tot cyl length, Lc'][0]
self._k_factor = main_dict['eff. buckling lenght factor'][0]
# For conical
self._cone_r1 = None
self._cone_r2 = None
self._cone_alpha = None
@property
def Lc(self):
return self._tot_cyl_length
@Lc.setter
def Lc(self, val):
self._tot_cyl_length = val
@property
def thk(self):
return self._thk
@thk.setter
def thk(self, val):
self._thk = val
@property
def radius(self):
return self._radius
@radius.setter
def radius(self, val):
self._radius = val
@property
def dist_between_rings(self):
return self._dist_between_rings
@dist_between_rings.setter
def dist_between_rings(self, val):
self._dist_between_rings = val
@property
def length_of_shell(self):
return self._length_of_shell
@length_of_shell.setter
def length_of_shell(self, val):
self._length_of_shell = val
@property
def tot_cyl_length(self):
return self._tot_cyl_length
@tot_cyl_length.setter
def tot_cyl_length(self, val):
self._tot_cyl_length = val
@property
def k_factor(self):
return self._k_factor
@k_factor.setter
def k_factor(self, val):
self._k_factor = val
def get_Zl(self):
L = self.tot_cyl_length*1000
Zl = math.pow(L,2)*math.sqrt(1-math.pow(0.3,2))/(self._radius*1000 * self._thk*1000) if self._thk*self._radius else 0
return Zl
def get_effective_width_shell_plate(self):
return 1.56*math.sqrt(self._radius * self._thk)/(1+12*self.thk/self._radius)
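    # The expression above is the effective shell flange attached to a ring,
    # leo = 1.56*sqrt(r*t)/(1 + 12*t/r); radius and thickness are stored in metres in this class,
    # so the returned width carries the same unit.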
def get_main_properties(self):
main_data = {'plate_thk': [self._thk, 'm'],
'radius': [self._radius, 'm'],
'distance between rings, l': [self._dist_between_rings, 'm'],
'length of shell, L': [self._length_of_shell, 'm'],
'tot cyl length, Lc': [self._tot_cyl_length, 'm'],
'eff. buckling lenght factor': [self._k_factor, 'm'],
'mat_yield': [self._yield, 'Pa']}
return main_data
def set_main_properties(self, main_dict):
self._thk = main_dict['plate_thk'][0]
self._yield = main_dict['mat_yield'][0]
self._radius = main_dict['radius'][0]
self._dist_between_rings = main_dict['distance between rings, l'][0]
self._length_of_shell = main_dict['length of shell, L'][0]
self._tot_cyl_length = main_dict['tot cyl length, Lc'][0]
self._k_factor = main_dict['eff. buckling lenght factor'][0]
class CylinderAndCurvedPlate():
'''
Buckling of cylinders and curved plates.
    Geometries
Selections for: Type of Structure Geometry:
geomeries = {1:'Unstiffened shell (Force input)', 2:'Unstiffened panel (Stress input)',
3:'Longitudinal Stiffened shell (Force input)',
4:'Longitudinal Stiffened panel (Stress input)',
5:'Ring Stiffened shell (Force input)',
6:'Ring Stiffened panel (Stress input)',
7:'Orthogonally Stiffened shell (Force input)',
8:'Orthogonally Stiffened panel (Stress input)'}
'''
geomeries = {11:'Flat plate, stiffened',10: 'Flat plate, unstiffened', 12: 'Flat plate, stiffened with girder',
1:'Unstiffened shell (Force input)', 2:'Unstiffened panel (Stress input)',
3:'Longitudinal Stiffened shell (Force input)', 4:'Longitudinal Stiffened panel (Stress input)',
5:'Ring Stiffened shell (Force input)', 6:'Ring Stiffened panel (Stress input)',
7:'Orthogonally Stiffened shell (Force input)', 8:'Orthogonally Stiffened panel (Stress input)'}
geomeries_map = dict()
for key, value in geomeries.items():
geomeries_map[value] = key
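    # Example of the reverse lookup built above: geomeries_map['Unstiffened shell (Force input)'] == 1
    # and geomeries_map['Flat plate, stiffened'] == 11.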
def __init__(self, main_dict = None, shell: Shell = None, long_stf: Structure = None, ring_stf: Structure = None,
ring_frame: Structure = None):
super(CylinderAndCurvedPlate, self).__init__()
# main_dict = {'sasd': 100, 'smsd': 100, 'tTsd': 50, 'tQsd':10, 'psd': -0.3, 'shsd': 0, 'geometry': 7,
# 'material factor': self._mat_factor, 'lT': 0, 'delta0': 0.005, 'fab method ring stf': 1,
# 'fab method ring girder': 2, 'E-module':2.1e11, 'poisson': 0.3, 'yield': 355e6}
#if main_dict['geometry'][0] in [1,3,5,7]: # Need to convert from forces to stresses.
self._sasd = main_dict['sasd'][0]
self._smsd = main_dict['smsd'][0]
self._tTsd = abs(main_dict['tTsd'][0])
self._tQsd= main_dict['tQsd'][0]
self._psd = main_dict['psd'][0]
self._shsd = main_dict['shsd'][0]
self._geometry = main_dict['geometry'][0]
self._mat_factor = main_dict['material factor'][0]
self._delta0 = main_dict['delta0'][0]
self._fab_method_ring_stf = main_dict['fab method ring stf'][0]
self._fab_method_ring_girder = main_dict['fab method ring girder'][0]
self._E = main_dict['E-module'][0]
self._v = main_dict['poisson'][0]
self._yield = main_dict['mat_yield'][0]
self._Shell = shell
self._LongStf = long_stf
self._RingStf = ring_stf
self._RingFrame = ring_frame
self._length_between_girders = main_dict['length between girders'][0]
self._panel_spacing = main_dict['panel spacing, s'][0]
self.__ring_stiffener_excluded = main_dict['ring stf excluded'][0]
self.__ring_frame_excluded = main_dict['ring frame excluded'][0]
self._end_cap_pressure_included = main_dict['end cap pressure'][0]
self._uls_or_als = main_dict['ULS or ALS'][0]
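        # A minimal construction sketch (hypothetical values; only element [0] of each entry is read
        # in this __init__, the second element is not read here):
        # main_dict = {'sasd': [-150e6, 'Pa'], 'smsd': [-20e6, 'Pa'], 'tTsd': [10e6, 'Pa'],
        #              'tQsd': [5e6, 'Pa'], 'psd': [-0.2e6, 'Pa'], 'shsd': [0, 'Pa'],
        #              'geometry': [7, ''], 'material factor': [1.15, ''], 'delta0': [0.005, ''],
        #              'fab method ring stf': [1, ''], 'fab method ring girder': [2, ''],
        #              'E-module': [2.1e11, 'Pa'], 'poisson': [0.3, ''], 'mat_yield': [355e6, 'Pa'],
        #              'length between girders': [..., 'm'], 'panel spacing, s': [..., 'm'],
        #              'ring stf excluded': [..., ''], 'ring frame excluded': [..., ''],
        #              'end cap pressure': [..., ''], 'ULS or ALS': ['ULS', '']}
        # cyl = CylinderAndCurvedPlate(main_dict, shell=Shell(shell_dict), long_stf=None,
        #                              ring_stf=None, ring_frame=None)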
def __str__(self):
'''
Returning all properties.
'''
long_string = 'N/A' if self._LongStf is None else self._LongStf.get_beam_string()
ring_string = 'N/A' if self._RingStf is None else self._RingStf.get_beam_string()
frame_string = 'N/A' if self._RingFrame is None else self._RingFrame.get_beam_string()
s = max([self._Shell.dist_between_rings, 2*math.pi*self._Shell.radius])*1000 if self._LongStf == None else \
self._LongStf.s
return \
str(
'\n Cylinder radius: ' + str(round(self._Shell.radius,3)) + ' meters' +
'\n Cylinder thickness: ' + str(self._Shell.thk*1000)+' mm'+
'\n Distance between rings, l: ' + str(self._Shell.dist_between_rings*1000)+' mm'+
'\n Length of shell, L: ' + str(self._Shell.length_of_shell*1000)+' mm'+
            '\n Total cylinder length: ' + str(self._Shell.tot_cyl_length*1000)+' mm'+
'\n Eff. Buckling length factor: ' + str(self._Shell.k_factor)+
'\n Material yield: ' + str(self._yield/1e6)+' MPa'+
'\n Spacing/panel circ., s: ' + str(s) + ' mm' +
'\n Longitudinal stiffeners: ' + long_string+
'\n Ring stiffeners ' + ring_string+
'\n Ring frames/girders: ' + frame_string+
'\n Design axial stress/force: ' + str(self._sasd/1e6)+' MPa'+
'\n Design bending stress/moment: ' + str(self._smsd/1e6)+' MPa'+
            '\n Design torsional stress/moment: ' + str(self._tTsd/1e6)+' MPa'+
'\n Design shear stress/force: ' + str(self._tQsd/1e6)+' MPa'+
'\n Design lateral pressure ' + str(self._psd/1e6)+' MPa'+
'\n Additional hoop stress ' + str(self._shsd/1e6)+' MPa')
@property
def sasd(self):
return self._sasd
@sasd.setter
def sasd(self, val):
self._sasd = val
@property
def smsd(self):
return self._smsd
@smsd.setter
def smsd(self, val):
self._smsd = val
@property
def tTsd(self):
return abs(self._tTsd)
@tTsd.setter
def tTsd(self, val):
self._tTsd = abs(val)
@property
def tQsd(self):
return self._tQsd
@tQsd.setter
def tQsd(self, val):
self._tQsd = val
@property
def psd(self):
return self._psd
@psd.setter
def psd(self, val):
self._psd = val
@property
def shsd(self):
return self._shsd
@shsd.setter
def shsd(self, val):
self._shsd = val
@property
def panel_spacing(self):
return self._panel_spacing
@panel_spacing.setter
def panel_spacing(self, val):
self._panel_spacing = val
@property
def ShellObj(self):
return self._Shell
@ShellObj.setter
def ShellObj(self, val):
self._Shell = val
@property
def LongStfObj(self):
return self._LongStf
@LongStfObj.setter
def LongStfObj(self, val):
self._LongStf = val
@property
def RingStfObj(self):
return self._RingStf
@RingStfObj.setter
def RingStfObj(self, val):
self._RingStf = val
@property
def RingFrameObj(self):
return self._RingFrame
@RingFrameObj.setter
def RingFrameObj(self, val):
self._RingFrame = val
@property
def geometry(self):
return self._geometry
@geometry.setter
def geometry(self, val):
self._geometry = val
@property
def length_between_girders(self):
return self._length_between_girders
@length_between_girders.setter
def length_between_girders(self, val):
self._length_between_girders = val
@property
def _ring_stiffener_excluded(self):
return self.__ring_stiffener_excluded
@_ring_stiffener_excluded.setter
def _ring_stiffener_excluded(self, val):
self.__ring_stiffener_excluded = val
@property
def _ring_frame_excluded(self):
return self.__ring_frame_excluded
@_ring_frame_excluded.setter
def _ring_frame_excluded(self, val):
self.__ring_frame_excluded = val
def get_utilization_factors(self, optimizing = False, empty_result_dict = False):
'''
If optimizing running time must be reduced.
'''
# Local buckling of stiffeners
results = {'Unstiffened shell': None,
'Longitudinal stiffened shell': None,
'Ring stiffened shell': None,
'Heavy ring frame': None,
'Column stability check': None,
'Stiffener check': None,
'Stiffener check detailed': None,
'Weight': None}
if empty_result_dict:
return results
data_shell_buckling = self.shell_buckling()
unstiffend_shell, column_buckling_data = None, None
# UF for unstiffened shell
unstiffend_shell = self.unstiffened_shell(shell_data=data_shell_buckling)
s = self._panel_spacing*1000 if self._LongStf is None else self._LongStf.s
if any([self._geometry in [1, 5], s > self._Shell.dist_between_rings*1000]):
uf_unstf_shell = unstiffend_shell['UF unstiffened circular cylinder']
results['Unstiffened shell'] = uf_unstf_shell
else:
uf_unstf_shell = unstiffend_shell['UF unstiffened curved panel']
results['Unstiffened shell'] = uf_unstf_shell
if optimizing:
if uf_unstf_shell > 1:
return False, 'UF unstiffened', results
# UF for longitudinal stiffened shell
if self._geometry in [3,4,7,8]:
if self._LongStf is not None:
column_buckling_data= self.column_buckling(unstf_shell_data=unstiffend_shell,
shell_bukcling_data=data_shell_buckling)
long_stf_shell = self.longitudinally_stiffened_shell(column_buckling_data=column_buckling_data,
unstiffened_shell=unstiffend_shell)
results['Column stability check'] = column_buckling_data['Column stability check']
results['Need to check column buckling'] = column_buckling_data['Need to check column buckling']
results['Stiffener check'] = column_buckling_data['stiffener check']
results['Stiffener check detailed'] = column_buckling_data['stiffener check detailed']
if self._geometry in [3,4,7,8] and long_stf_shell['fksd'] > 0:
results['Longitudinal stiffened shell'] = long_stf_shell['sjsd_used']/long_stf_shell['fksd']\
if self._geometry in [3,4,7,8] else 0
if optimizing:
if not results['Column stability check']:
return False, 'Column stability', results
elif False in results['Stiffener check'].values():
return False, 'Stiffener check', results
elif results['Longitudinal stiffened shell'] > 1:
return False, 'UF longitudinal stiffeners', results
if self._geometry in [5,6,7,8]:
# UF for panel ring buckling
ring_stf_shell = None
if self._RingStf is not None:
column_buckling_data = column_buckling_data if column_buckling_data is not None \
else self.column_buckling( unstf_shell_data=unstiffend_shell,
shell_bukcling_data=data_shell_buckling)
ring_stf_shell = self.ring_stiffened_shell(data_shell_buckling=data_shell_buckling,
column_buckling_data=column_buckling_data)
results['Column stability check'] = column_buckling_data['Column stability check']
results['Need to check column buckling'] = column_buckling_data['Need to check column buckling']
results['Stiffener check'] = column_buckling_data['stiffener check']
results['Stiffener check detailed'] = column_buckling_data['stiffener check detailed']
results['Ring stiffened shell'] = ring_stf_shell[0]
if optimizing:
if not results['Column stability check']:
return False, 'Column stability', results
elif False in results['Stiffener check'].values():
return False, 'Stiffener check', results
elif results['Ring stiffened shell'] > 1:
return False, 'UF ring stiffeners', results
# UF for ring frame
if self._geometry in [5, 6, 7, 8]:
if self._RingFrame is not None:
column_buckling_data = column_buckling_data if column_buckling_data is not None \
else self.column_buckling( unstf_shell_data=unstiffend_shell,
shell_bukcling_data=data_shell_buckling)
ring_stf_shell = ring_stf_shell if ring_stf_shell is not None else\
self.ring_stiffened_shell(data_shell_buckling=data_shell_buckling,
column_buckling_data=column_buckling_data)
results['Column stability check'] = column_buckling_data['Column stability check']
results['Need to check column buckling'] = column_buckling_data['Need to check column buckling']
results['Stiffener check'] = column_buckling_data['stiffener check']
results['Stiffener check detailed'] = column_buckling_data['stiffener check detailed']
results['Heavy ring frame'] = ring_stf_shell[1]
if optimizing:
if not results['Column stability check']:
return False, 'Column stability', results
elif False in results['Stiffener check'].values():
return False, 'Stiffener check', results
elif results['Heavy ring frame'] > 1:
return False, 'UF ring frame', results
if optimizing:
return True, 'Check OK', results
# print('Results for geometry', self._geometry)
# print('UF',uf_unstf_shell, uf_long_stf, uf_ring_stf, uf_ring_frame)
# print('Stiffeners', stiffener_check)
return results
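        # Return convention: with optimizing=True the method exits early and returns a tuple
        # (ok_flag, message, results) at the first exceeded check, otherwise it returns the results
        # dict initialised at the top of the method; entries stay None for checks that do not apply
        # to the selected geometry.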
def set_main_properties(self, main_dict):
self._sasd = main_dict['sasd'][0]
self._smsd = main_dict['smsd'][0]
self._tTsd = abs(main_dict['tTsd'][0])
self._tQsd= main_dict['tQsd'][0]
self._psd = main_dict['psd'][0]
self._shsd = main_dict['shsd'][0]
self._geometry = main_dict['geometry'][0]
self._mat_factor = main_dict['material factor'][0]
self._delta0 = main_dict['delta0'][0]
self._fab_method_ring_stf = main_dict['fab method ring stf'][0]
self._fab_method_ring_girder = main_dict['fab method ring girder'][0]
self._E = main_dict['E-module'][0]
self._v = main_dict['poisson'][0]
self._yield = main_dict['mat_yield'][0]
self._length_between_girders = main_dict['length between girders'][0]
self._panel_spacing = main_dict['panel spacing, s'][0]
self.__ring_stiffener_excluded = main_dict['ring stf excluded'][0]
self.__ring_frame_excluded = main_dict['ring frame excluded'][0]
self._end_cap_pressure_included = main_dict['end cap pressure'][0]
self._uls_or_als = main_dict['ULS or ALS'][0]
def shell_buckling(self):
'''
Main sheet to calculate cylinder buckling.
'''
stucture_objects = {'Unstiffened':self._Shell, 'Long Stiff.': self._LongStf, 'Ring Stiffeners': self._RingStf,
'Heavy ring Frame': self._RingFrame}
stf_type = ['T', 'FB', 'T']
l = self._Shell.dist_between_rings*1000
r = self._Shell.radius*1000
t = self._Shell.thk*1000
parameters, cross_sec_data = list(), list()
for idx, obj in stucture_objects.items():
if obj is None:
cross_sec_data.append([np.nan, np.nan, np.nan, np.nan, np.nan])
if idx not in ['Unstiffened', 'Long Stiff.']:
parameters.append([np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan])
continue
if idx != 'Unstiffened':
                hs = obj.hw/2 if obj.get_stiffener_type() == 'FB' else obj.hw + obj.tf/2  # stf_type above is a list, so query the object's own type
It = obj.get_torsional_moment_venant()
se = self._Shell.get_effective_width_shell_plate()
Ipo = obj.get_polar_moment()
Iz = obj.get_Iz_moment_of_inertia()
Iy = obj.get_moment_of_intertia(efficent_se=se, tf1=self._Shell.thk)*1000**4
cross_sec_data.append([hs, It, Iz, Ipo, Iy])
A = obj.get_cross_section_area(include_plate=False)*math.pow(1000,2)
beta = l/(1.56*math.sqrt(r*t))
leo = (l/beta) * ((math.cosh(2*beta)-math.cos(2*beta))/(math.sinh(2*beta)+math.sin(2*beta)))
worst_axial_comb = min(self._sasd/1e6 - self._smsd/1e6, self._sasd/1e6 + self._smsd/1e6)
sxsd_used = worst_axial_comb
if idx == 'Long Stiff.':
zp = obj.get_cross_section_centroid_with_effective_plate(include_plate=False) * 1000
h_tot = obj.hw + obj.tf
zt = h_tot -zp
else:
se = self._Shell.get_effective_width_shell_plate()
zp = obj.get_cross_section_centroid_with_effective_plate(se=se, tf1=self._Shell.thk) * 1000 # ch7.5.1 page 19
h_tot = self._Shell.thk*1000 + obj.hw + obj.tf
zt = h_tot -zp
if idx not in ['Unstiffened', 'Long Stiff.']: # Parameters
alpha = A/(leo*t)
zeta = max([0, 2*(math.sinh(beta)*math.cos(beta)+math.cosh(beta)*math.sin(beta))/
(math.sinh(2*beta)+math.sin(2*beta))])
rf = r - t / 2 - (obj.hw + obj.tf)
r0 = zt + rf
parameters.append([alpha, beta, leo, zeta, rf, r0, zt])
sxsd, shsd, shRsd, tsd = list(), list(), list(), list()
for idx, obj in stucture_objects.items():
if obj is None:
shRsd.append(np.nan)
continue
if idx == 'Unstiffened':
shsd.append((self._psd/1e6)*r/t + self._shsd/1e6)
sxsd.append(self._sasd/1e6+self._smsd/1e6 if self._geometry in [2,6] else
min([self._sasd/1e6, self._sasd/1e6-self._smsd/1e6, self._sasd/1e6+self._smsd/1e6]))
tsd.append(self._tTsd/1e6 + self._tQsd/1e6)
elif idx == 'Long Stiff.':
if stucture_objects['Ring Stiffeners'] == None:
shsd.append(shsd[0]+self._shsd/1e6)
else:
shsd_ring = ((self._psd/1e6)*r/t)-parameters[0][0]*parameters[0][3]/(parameters[0][0]+1)*\
((self._psd/1e6)*r/t-0.3*sxsd[0])
shsd.append(shsd_ring + self._shsd/1e6)
if self._geometry in [3,4,7,8]:
sxsd.append(sxsd_used)
else:
sxsd.append(sxsd[0])
tsd.append(self._tTsd/1e6 + self._tQsd/1e6)
elif idx == 'Ring Stiffeners':
rf = parameters[0][4]
shsd_ring = ((self._psd / 1e6) * r / t) - parameters[0][0] * parameters[0][3] / (parameters[0][0] + 1) * \
((self._psd / 1e6) * r / t - 0.3 * sxsd[0])
shsd.append(np.nan if stucture_objects['Ring Stiffeners'] == None else shsd_ring)
shRsd.append(((self._psd/1e6)*r/t-0.3*sxsd[0])*(1/(1+parameters[0][0]))*(r/rf))
if self._geometry > 4:
sxsd.append(sxsd[0])
tsd.append(tsd[0])
else:
sxsd.append(np.nan)
tsd.append(np.nan)
else:
rf = parameters[1][4]
shsd.append(((self._psd/1e6)*r/t)-parameters[1][0]*parameters[1][3]/(parameters[1][0]+1)*
((self._psd/1e6)*r/t-0.3*self._sasd/1e6))
shRsd.append(((self._psd/1e6)*r/t-0.3*self._sasd/1e6)*(1/(1+parameters[1][0]))*(r/rf))
if self._geometry > 4:
sxsd.append(sxsd[0])
tsd.append(tsd[0])
else:
sxsd.append(np.nan)
tsd.append(np.nan)
sxsd = np.array(sxsd)
shsd = np.array(shsd)
tsd = np.array(np.abs(tsd))
sjsd = np.sqrt(sxsd**2 - sxsd*shsd + shsd**2+3*tsd**2)
return {'sjsd': sjsd, 'parameters': parameters, 'cross section data': cross_sec_data,
'shRsd': shRsd, 'shsd': shsd, 'sxsd': sxsd}
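        # The arrays above follow the insertion order of stucture_objects: index 0 = unstiffened
        # shell, 1 = longitudinal stiffener, 2 = ring stiffener, 3 = ring frame. sjsd is the
        # von Mises equivalent stress sqrt(sxsd^2 - sxsd*shsd + shsd^2 + 3*tsd^2) per component,
        # and 'parameters' holds [alpha, beta, leo, zeta, rf, r0, zt] for the two ring members only.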
def unstiffened_shell(self, conical = False, shell_data = None):
E = self._E/1e6
t = self._Shell.thk*1000
# get correct s
s = min([self._Shell.dist_between_rings, 2*math.pi*self._Shell.radius])*1000 if self._LongStf == None else \
self._LongStf.s
v = self._v
r = self._Shell.radius*1000
l = self._Shell.dist_between_rings * 1000
fy = self._yield/1e6
sasd = self._sasd/1e6
smsd = self._smsd/1e6
tsd = abs(self._tTsd/1e6+self._tQsd/1e6)
psd = self._psd/1e6
if self._RingStf is None:
shsd = shell_data['shsd'][0]
else:
shsd = shell_data['shsd'][1]
provide_data = dict()
'''
Selections for: Type of Structure Geometry:
1 Unstiffened shell (Force input)
2 Unstiffened panel (Stress input)
3 Longitudinal Stiffened shell (Force input)
4 Longitudinal Stiffened panel (Stress input)
5 Ring Stiffened shell (Force input)
6 Ring Stiffened panel (Stress input)
7 Orthogonally Stiffened shell (Force input)
8 Orthogonally Stiffened panel (Stress input)
Selected:
3 Longitudinal Stiffened shell (Force input)
'''
# Pnt. 3.3 Unstifffed curved panel
geometry = self._geometry
if geometry in [2,6]:
sxsd = sasd+smsd
else:
sxsd = min(sasd, sasd+smsd, sasd-smsd)
if smsd < 0:
smsd = -smsd
sm0sd = -smsd
else:
if geometry in [2, 6]:
smsd = 0
sm0sd = 0
else:
smsd = smsd
sm0sd = smsd
sjsd = math.sqrt(math.pow(sxsd,2) - sxsd*shsd + math.pow(shsd,2) + 3 * math.pow(tsd, 2)) # (3.2.3)
Zs = (math.pow(s, 2) / (r * t)) * math.sqrt(1 - math.pow(v, 2)) # The curvature parameter Zs (3.3.3)
def table_3_1(chk):
psi = {'Axial stress': 4, 'Shear stress': 5.34+4*math.pow(s/l, 2),
'Circumferential compression': math.pow(1+math.pow(s/l, 2), 2)} # ψ
epsilon = {'Axial stress': 0.702*Zs, 'Shear stress': 0.856*math.sqrt(s/l)*math.pow(Zs, 3/4),
'Circumferential compression': 1.04*(s/l)*math.sqrt(Zs)} # ξ
rho = {'Axial stress': 0.5*math.pow(1+(r/(150*t)), -0.5), 'Shear stress': 0.6,
'Circumferential compression': 0.6}
return psi[chk], epsilon[chk], rho[chk]
vals = list()
for chk in ['Axial stress', 'Shear stress', 'Circumferential compression']:
psi, epsilon, rho = table_3_1(chk=chk)
C = psi * math.sqrt(1 + math.pow(rho * epsilon / psi, 2)) # (3.4.2) (3.6.4)
fE = C*(math.pow(math.pi, 2)*E/(12*(1-math.pow(v,2)))) *math.pow(t/s,2)
#print(chk, 'C', C, 'psi', psi,'epsilon', epsilon,'rho' ,rho, 'fE', fE)
vals.append(fE)
fEax, fEshear, fEcirc = vals
sa0sd = -sxsd if sxsd < 0 else 0
        sh0sd = -shsd if shsd < 0 else 0 # Maximum allowable stress from iteration.
if any([val == 0 for val in vals]):
lambda_s_pow = 0
else:
lambda_s_pow = (fy/sjsd) * (sa0sd/fEax + sh0sd/fEcirc + tsd/fEshear)
lambda_s = math.sqrt(lambda_s_pow)
fks = fy/math.sqrt(1+math.pow(lambda_s,4 ))
provide_data['fks - Unstifffed curved panel'] = fks
if lambda_s < 0.5:
gammaM = self._mat_factor
else:
if self._mat_factor == 1.1:
if lambda_s > 1:
gammaM = 1.4
else:
gammaM = 0.8+0.6*lambda_s
elif self._mat_factor == 1.15:
if lambda_s > 1:
gammaM = 1.45
else:
gammaM = 0.85+0.6*lambda_s
else:
if lambda_s > 1:
gammaM = 1.45 * (self._mat_factor/1.15)
else:
gammaM = 0.85+0.6*lambda_s * (self._mat_factor/1.15)
if self._uls_or_als == 'ALS':
gammaM = gammaM/self._mat_factor
provide_data['gammaM Unstifffed panel'] = gammaM
fksd = fks/gammaM
provide_data['fksd - Unstifffed curved panel'] = fksd
uf = sjsd/fksd
provide_data['UF unstiffened curved panel'] = uf
provide_data['gammaM curved panel'] = gammaM
sjsd_max = math.sqrt(math.pow(sasd+smsd,2)-(sasd+smsd)*shsd+math.pow(shsd,2)+3*math.pow(tsd,2))
uf_max = self._mat_factor* sjsd_max/fy
# print('Unstifffed curved panel', 'UF', uf, 'UFmax', uf_max, 'sigjsd', sjsd, 'Zs', Zs, 'lambda_s', lambda_s,
# 'fks', fks, 'gammaM', gammaM, 'sjsd_max', sjsd_max)
def iter_table_1():
found, sasd_iter, count, this_val, logger = False, 0 if uf > 1 else sasd, 0, 0, list()
while not found:
# Iteration
sigmsd_iter = smsd if geometry in [2,6] else min([-smsd, smsd])
siga0sd_iter = 0 if sasd_iter >= 0 else -sasd_iter # (3.2.4)
sigm0sd_iter = 0 if sigmsd_iter >= 0 else -sigmsd_iter # (3.2.5)
sigh0sd_iter = 0 if shsd>= 0 else -shsd # (3.2.6)
sjsd_iter = math.sqrt(math.pow(sasd_iter+sigmsd_iter, 2) - (sasd_iter+sigmsd_iter)*shsd + math.pow(shsd, 2)+
3*math.pow(tsd, 2)) #(3.2.3)
lambdas_iter = math.sqrt((fy / sjsd_iter) * ((siga0sd_iter+sigm0sd_iter)/fEax+ sigh0sd_iter/fEcirc+tsd/fEshear)) # (3.2.2)
gammaM_iter = 1 # As taken in the DNVGL sheets
fks_iter = fy / math.sqrt(1 + math.pow(lambdas_iter,4))
fksd_iter = fks_iter / gammaM_iter
#print('sjsd', sjsd_iter, 'fksd', fksd_iter, 'fks', fks, 'gammaM', gammaM_iter, 'lambdas_iter', lambdas_iter)
this_val = sjsd_iter/fksd_iter
logger.append(0 if this_val > 1 else siga0sd_iter)
if this_val > 1.0 or count == 1e6:
found = True
count += 1
if this_val >0.98:
sasd_iter -= 0.5
elif this_val > 0.95:
sasd_iter -= 1
elif this_val > 0.9:
sasd_iter -= 2
elif this_val > 0.7:
sasd_iter -= 10
else:
sasd_iter -= 20
#print(sasd_iter, this_val)
return 0 if len(logger) == 1 else max([logger[-2],0])
provide_data['max axial stress - 3.3 Unstifffed curved panel'] = iter_table_1()
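        # iter_table_1() above searches for the largest allowable axial stress by stepping sasd
        # downwards (steps of 0.5 to 20 MPa depending on how close the utilisation is to 1.0) until
        # sjsd/fksd exceeds 1.0, and then reports the last value logged before the limit was crossed.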
# Pnt. 3.4 Unstifffed circular cylinders
Zl = (math.pow(l, 2)/(r*t)) * math.sqrt(1 - math.pow(v, 2)) #(3.4.3) (3.6.5)
provide_data['Zl'] = Zl
def table_3_2(chk):
psi = {'Axial stress': 1, 'Bending': 1,
'Torsion and shear force': 5.34,
'Lateral pressure': 4, 'Hydrostatic pressure': 2} # ψ
zeta= {'Axial stress': 0.702*Zl, 'Bending': 0.702*Zl,
'Torsion and shear force': 0.856* math.pow(Zl, 3/4),'Lateral pressure': 1.04*math.sqrt(Zl),
'Hydrostatic pressure': 1.04*math.sqrt(Zl)} # ξ
rho = {'Axial stress': 0.5*math.pow(1+(r/(150*t)), -0.5), 'Bending': 0.5*math.pow(1+(r/(300*t)), -0.5),
'Torsion and shear force': 0.6,
'Lateral pressure': 0.6, 'Hydrostatic pressure': 0.6}
return psi[chk], zeta[chk], rho[chk]
vals = list()
for chk in ['Axial stress', 'Bending', 'Torsion and shear force',
'Lateral pressure','Hydrostatic pressure']:
psi, zeta, rho = table_3_2(chk=chk)
C = psi * math.sqrt(1 + math.pow(rho * zeta / psi, 2)) # (3.4.2) (3.6.4)
fE = C*math.pow(math.pi,2)*E / (12*(1-math.pow(v,2))) * math.pow(t/l,2)
#print(chk, 'C', C, 'psi', psi,'epsilon', epsilon,'rho' ,rho, 'fE', fE)
vals.append(fE)
fEax, fEbend, fEtors, fElat, fEhyd = vals
provide_data['fEax - Unstifffed circular cylinders'] = fEax
test1 = 3.85 * math.sqrt(r / t)
test2 = 2.25 * math.sqrt(r / t)
test_l_div_r = l/r
provide_data['fEh - Unstifffed circular cylinders - Psi=4'] = 0.25*E*math.pow(t/r,2) if test_l_div_r > test2 else fElat
if l / r > test1:
fEt_used = 0.25 * E * math.pow(t / r, 3 / 2) # (3.4.4)
else:
fEt_used = fEtors
if l / r > test2:
fEh_used = 0.25 * E * math.pow(t / r, 2)
else:
fEh_used = fElat if self._end_cap_pressure_included == 'not included in axial stresses' else fEhyd
sjsd = math.sqrt(math.pow(sxsd,2) - sxsd*shsd + math.pow(shsd,2) + 3 * math.pow(tsd, 2)) # (3.2.3)
sa0sd = -sasd if sasd < 0 else 0
sh0sd = -shsd if shsd < 0 else 0
if any([fEax == 0, fEbend == 0, fEt_used == 0, fEh_used == 0, sjsd == 0]):
lambda_s_pow = 0
else:
lambda_s_pow = (fy/sjsd) * (sa0sd/fEax + sm0sd/fEbend + sh0sd/fEh_used + tsd/fEt_used)
lambda_s = math.sqrt(lambda_s_pow)
fks = fy/math.sqrt(1+math.pow(lambda_s,4 ))
provide_data['fks - Unstifffed circular cylinders'] = fks
if lambda_s < 0.5:
gammaM = self._mat_factor
else:
if self._mat_factor == 1.1:
if lambda_s > 1:
gammaM = 1.4
else:
gammaM = 0.8+0.6*lambda_s
elif self._mat_factor == 1.15:
if lambda_s > 1:
gammaM = 1.45
else:
gammaM = 0.85+0.6*lambda_s
else:
if lambda_s > 1:
gammaM = 1.45 * (self._mat_factor/1.15)
else:
gammaM = 0.85+0.6*lambda_s * (self._mat_factor/1.15)
if self._uls_or_als == 'ALS':
gammaM = gammaM/self._mat_factor
fksd = fks/gammaM
provide_data['fksd - Unstifffed circular cylinders'] = fksd
uf = sjsd/fksd
provide_data['UF unstiffened circular cylinder'] = uf
provide_data['gammaM circular cylinder'] = gammaM
#print('UF', uf, 'Unstifffed circular cylinders')
def iter_table_2():
found, sasd_iter, count, this_val, logger = False, 0 if uf > 1 else sasd, 0, 0, list()
while not found:
# Iteration
sigmsd_iter = smsd if geometry in [2, 6] else min([-smsd, smsd])
siga0sd_iter = 0 if sasd_iter >= 0 else -sasd_iter # (3.2.4)
sigm0sd_iter = 0 if sigmsd_iter >= 0 else -sigmsd_iter # (3.2.5)
sigh0sd_iter = 0 if shsd >= 0 else -shsd # (3.2.6)
sjsd_iter = math.sqrt(
math.pow(sasd_iter + sigmsd_iter, 2) - (sasd_iter + sigmsd_iter) * shsd + math.pow(shsd, 2) +
3 * math.pow(tsd, 2)) # (3.2.3)
lambdas_iter = math.sqrt((fy/sjsd_iter) * (siga0sd_iter/fEax + sigm0sd_iter/fEbend +
sigh0sd_iter/fElat + tsd/fEtors))
gammaM_iter = 1 # As taken in the DNVGL sheets
fks_iter = fy / math.sqrt(1 + math.pow(lambdas_iter, 4))
fksd_iter = fks_iter / gammaM_iter
# print('sjsd', sjsd_iter, 'fksd', fksd_iter, 'fks', fks, 'gammaM', gammaM_iter, 'lambdas_iter', lambdas_iter)
this_val = sjsd_iter / fksd_iter
logger.append(sasd_iter)
if this_val > 1.0 or count == 1e6:
found = True
count += 1
if this_val >0.98:
sasd_iter -= 0.5
elif this_val > 0.95:
sasd_iter -= 1
elif this_val > 0.9:
sasd_iter -= 2
elif this_val > 0.7:
sasd_iter -= 10
else:
sasd_iter -= 20
return 0 if len(logger) == 1 else max(logger[-2],0)
provide_data['max axial stress - 3.4.2 Shell buckling'] = iter_table_2()
provide_data['shsd'] = shsd
return provide_data
def ring_stiffened_shell(self, data_shell_buckling = None, column_buckling_data = None):
E = self._E/1e6
t = self._Shell.thk*1000
s = min([self._Shell.dist_between_rings, 2*math.pi*self._Shell.radius])*1000 if self._LongStf == None else \
self._LongStf.s
r = self._Shell.radius*1000
l = self._Shell.dist_between_rings * 1000
fy = self._yield/1e6
L = self._Shell.tot_cyl_length*1000
LH = L
sasd = self._sasd/1e6
smsd = self._smsd/1e6
tsd = abs(self._tTsd/1e6 + self._tQsd/1e6) # MAYBE MAYBE NOT.
psd = self._psd/1e6
data_shell_buckling = self.shell_buckling() if data_shell_buckling == None else data_shell_buckling
#Pnt. 3.5: Ring stiffened shell
# Pnt. 3.5.2.1 Requirement for cross-sectional area:
#Zl = self._Shell.get_Zl()
Zl = math.pow(l, 2) * math.sqrt(1 - math.pow(self._v, 2)) / (r * t) if r * t > 0 else 0
Areq = np.nan if Zl == 0 else (2/math.pow(Zl,2)+0.06)*l*t
Areq = np.array([Areq, Areq])
Astf = np.nan if self._RingStf is None else self._RingStf.get_cross_section_area(include_plate=False)*1000**2
Aframe = np.nan if self._RingFrame is None else \
self._RingFrame.get_cross_section_area(include_plate=False) * 1000 ** 2
A = np.array([Astf, Aframe])
uf_cross_section = Areq/A
#Pnt. 3.5.2.3 Effective width calculation of shell plate
lef = 1.56*math.sqrt(r*t)/(1+12*t/r)
lef_used = np.array([min([lef, LH]), min([lef, LH])])
#Pnt. 3.5.2.4 Required Ix for Shell subject to axial load
A_long_stf = 0 if self._LongStf is None else self._LongStf.get_cross_section_area(include_plate=False)*1000**2
alfaA = 0 if s*t <= 0 else A_long_stf/(s*t)
r0 = np.array([data_shell_buckling['parameters'][0][5], data_shell_buckling['parameters'][1][5]])
worst_ax_comp = min([sasd+smsd, sasd-smsd])
Ixreq = np.array([abs(worst_ax_comp) * t * (1 + alfaA) * math.pow(r0[0], 4) / (500 * E * l),
abs(worst_ax_comp) * t * (1 + alfaA) * math.pow(r0[1], 4) / (500 * E * l)])
#Pnt. 3.5.2.5 Required Ixh for shell subjected to torsion and/or shear:
Ixhreq = np.array([math.pow(tsd / E, (8 / 5)) * math.pow(r0[0] / L, 1 / 5) * L * r0[0] * t * l,
math.pow(tsd / E, (8 / 5)) * math.pow(r0[1] / L, 1 / 5) * L * r0[1] * t * l])
#Pnt. 3.5.2.6 Simplified calculation of Ih for shell subjected to external pressure
zt = np.array([data_shell_buckling['parameters'][0][6],data_shell_buckling['parameters'][1][6]])
rf = np.array([data_shell_buckling['parameters'][0][4], data_shell_buckling['parameters'][1][4]])
delta0 = r*self._delta0
fb_ring_req_val = np.array([0 if self._RingStf is None else 0.4*self._RingStf.tw*math.sqrt(E/fy),
0 if self._RingFrame is None else 0.4*self._RingFrame.tw*math.sqrt(E/fy)])
# if self._RingStf.get_stiffener_type() == 'FB':
# fb_ring_req = fb_ring_req_val[0] > self._RingStf.hw
# else:
# fb_ring_req = np.NaN
flanged_rf_req_h_val = np.array([0 if self._RingStf is None else 1.35*self._RingStf.tw*math.sqrt(E/fy),
0 if self._RingFrame is None else 1.35*self._RingFrame.tw*math.sqrt(E/fy)])
# if self._RingFrame.get_stiffener_type() != 'FB':
# flanged_rf_req_h = flanged_rf_req_h_val[1] > self._RingFrame.hw
# else:
# flanged_rf_req_h = np.NaN
flanged_rf_req_b_val = np.array([0 if self._RingStf is None else 7*self._RingStf.hw/math.sqrt(10+E*self._RingStf.hw/(fy*r)),
0 if self._RingFrame is None else 7*self._RingFrame.hw/math.sqrt(10+E*self._RingFrame.hw/(fy*r))])
# if self._RingFrame.get_stiffener_type() != 'FB':
# flanged_rf_req_b = flanged_rf_req_b_val[1] > self._RingFrame.b
# else:
# flanged_rf_req_b = np.NaN
if self._RingStf is not None:
spf_stf = self._RingStf.hw/fb_ring_req_val[0] if self._RingStf.get_stiffener_type() == 'FB' \
else max([flanged_rf_req_b_val[0]/self._RingStf.b, self._RingStf.hw/flanged_rf_req_h_val[0]])
else:
spf_stf = 0
if self._RingFrame is not None:
spf_frame = self._RingFrame.hw / fb_ring_req_val[1]if self._RingFrame.get_stiffener_type() == 'FB' \
else max([flanged_rf_req_b_val[1] / self._RingFrame.b,self._RingFrame.hw / flanged_rf_req_h_val[1]])
else:
spf_frame = 0
Stocky_profile_factor = np.array([spf_stf, spf_frame])
fT = column_buckling_data['fT_dict']
fT = np.array([fT['Ring Stiff.'] if Stocky_profile_factor[0] > 1 else fy,
fT['Ring Girder'] if Stocky_profile_factor[1] > 1 else fy])
fr_used = np.array([fT[0] if self._fab_method_ring_stf == 1 else 0.9 * fT[0],
fT[1] if self._fab_method_ring_girder == 1 else 0.9 * fT[1]])
shRsd = [abs(val) for val in data_shell_buckling['shRsd']]
Ih = np.array([0 if E*r0[idx]*(fr_used[idx]/2-shRsd[idx]) == 0 else abs(psd)*r*math.pow(r0[idx],2)*l/(3*E)*
(1.5+3*E*zt[idx]*delta0/(math.pow(r0[idx],2)
*(fr_used[idx]/2-shRsd[idx])))
for idx in [0,1]])
# Pnt. 3.5.2.2 Moment of inertia:
IR = [Ih[idx] + Ixhreq[idx] + Ixreq[idx] if all([psd <= 0, Ih[idx] > 0]) else Ixhreq[idx] + Ixreq[idx]
for idx in [0,1]]
Iy = [data_shell_buckling['cross section data'][idx+1][4] for idx in [0,1]]
uf_moment_of_inertia = list()
for idx in [0,1]:
if Iy[idx] > 0:
uf_moment_of_inertia.append(9.999 if fr_used[idx] < 2*shRsd[idx] else IR[idx]/Iy[idx])
else:
uf_moment_of_inertia.append(0)
# Pnt. 3.5.2.7 Refined calculation of external pressure
# parameters.append([alpha, beta, leo, zeta, rf, r0, zt])
I = Iy
Ihmax = [max(0, I[idx]-Ixhreq[idx]-Ixreq[idx]) for idx in [0,1]]
leo = [data_shell_buckling['parameters'][idx][2] for idx in [0,1]]
Ar = A
ih2 = [0 if Ar[idx]+leo[idx]*t == 0 else Ihmax[idx]/(Ar[idx]+leo[idx]*t) for idx in [0,1]]
alfa = [0 if l*t == 0 else 12*(1-math.pow(0.3,2))*Ihmax[idx]/(l*math.pow(t,3)) for idx in [0,1]]
betta = [data_shell_buckling['parameters'][idx][0] for idx in [0,1]]
ZL = [math.pow(L,2)/r/t*math.sqrt(1-math.pow(0.3,2)) for idx in [0,1]]
C1 = [2*(1+alfa[idx])/(1+betta[idx])*(math.sqrt(1+0.27*ZL[idx]/math.sqrt(1+alfa[idx]))-alfa[idx]/(1+alfa[idx]))
for idx in [0,1]]
C2 = [2*math.sqrt(1+0.27*ZL[idx]) for idx in [0,1]]
my = [0 if ih2[idx]*r*leo[idx]*C1[idx] == 0 else
zt[idx]*delta0*rf[idx]*l/(ih2[idx]*r*leo[idx])*(1-C2[idx]/C1[idx])*1/(1-0.3/2) for idx in [0,1]]
fE = np.array([C1[idx]*math.pow(math.pi,2)*E/(12*(1-math.pow(0.3,2)))*(math.pow(t/L,2)) if L > 0
else 0.1 for idx in [0,1]])
fr = np.array(fT)
lambda_2 = fr/fE
lambda_ = np.sqrt(lambda_2)
fk = [0 if lambda_2[idx] == 0 else fr[idx]*(1+my[idx]+lambda_2[idx]-math.sqrt(math.pow(1+my[idx]+lambda_2[idx],2)-
4*lambda_2[idx]))/(2*lambda_2[idx])
for idx in [0,1]]
gammaM = self._mat_factor # LRFD
fkd = [fk[idx]/gammaM for idx in [0,1]]
psd = np.array([0.75*fk[idx]*t*rf[idx]*(1+betta[idx])/(gammaM*math.pow(r,2)*(1-0.3/2)) for idx in [0,1]])
uf_refined = abs((self._psd/1e6))/psd
return np.max([uf_cross_section, uf_moment_of_inertia, uf_refined], axis=0)
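        # The returned array has two entries, [ring stiffener, ring frame/girder]; each is the worst
        # of the three checks above (required cross-sectional area, required moment of inertia and
        # the refined external-pressure capacity), which is how get_utilization_factors() reads it
        # via ring_stf_shell[0] and ring_stf_shell[1].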
def longitudinally_stiffened_shell(self, column_buckling_data = None, unstiffened_shell = None):
h = self._Shell.thk*1000 + self._LongStf.hw + self._LongStf.tf
hw = self._LongStf.hw
tw = self._LongStf.tw
b = self._LongStf.b
tf = self._LongStf.tf
E = self._E/1e6
t = self._Shell.thk*1000
s = max([self._Shell.dist_between_rings, 2*math.pi*self._Shell.radius])*1000 if self._LongStf == None else \
self._LongStf.s
v = self._v
r = self._Shell.radius*1000
l = self._Shell.dist_between_rings * 1000
fy = self._yield/1e6
L = self._Shell.tot_cyl_length*1000
LH = L
sasd = self._sasd/1e6
smsd = self._smsd/1e6
tsd = abs(self._tTsd/1e6 + self._tQsd/1e6)
psd = self._psd/1e6
shsd = unstiffened_shell['shsd']
lightly_stf = s/t > math.sqrt(r/t)
provide_data = dict()
'''
Selections for: Type of Structure Geometry:
1 Unstiffened shell (Force input)
2 Unstiffened panel (Stress input)
3 Longitudinal Stiffened shell (Force input)
4 Longitudinal Stiffened panel (Stress input)
5 Ring Stiffened shell (Force input)
6 Ring Stiffened panel (Stress input)
7 Orthogonally Stiffened shell (Force input)
8 Orthogonally Stiffened panel (Stress input)
Selected:
3 Longitudinal Stiffened shell (Force input)
'''
# Pnt. 3.3 Unstifffed curved panel
geometry = self._geometry
data = unstiffened_shell if unstiffened_shell is not None else self.unstiffened_shell()
if geometry == 1:
fks = data['fks - Unstifffed circular cylinders']
else:
fks = data['fks - Unstifffed curved panel']
sxSd =min([sasd+smsd, sasd-smsd])
sjsd = math.sqrt(math.pow(sxSd,2) - sxSd*shsd + math.pow(shsd,2) + 3 * math.pow(tsd, 2))
Se = (fks*abs(sxSd) / (sjsd*fy))*s
# Moment of inertia
As = A = hw*tw + b*tf # checked
num_stf = math.floor(2*math.pi*r/s)
        e = (hw*tw*(hw/2) + b*tf*(hw+tf/2)) / (hw*tw+b*tf)  # centroid of the stiffener over its total area Aw + Af
Istf = h*math.pow(tw,3)/12 + tf*math.pow(b, 3)/12
dist_stf = r - t / 2 - e
Istf_tot = 0
angle = 0
for stf_no in range(num_stf):
Istf_tot += Istf + As*math.pow(dist_stf*math.cos(angle),2)
angle += 2*math.pi/num_stf
# Ishell = (math.pi/4) * ( math.pow(r+t/2,4) - math.pow(r-t/2,4))
# Itot = Ishell + Istf_tot # Checked
Iy = self._LongStf.get_moment_of_intertia(efficent_se=Se/1000, tf1=self._Shell.thk)*1000**4
alpha = 12*(1-math.pow(v,2))*Iy/(s*math.pow(t,3))
Zl = (math.pow(l, 2)/(r*t)) * math.sqrt(1-math.pow(v,2))
#|1print('Zl', Zl, 'alpha', alpha, 'Isef', Iy, 'Se', Se, 'sjsd', sjsd, 'sxsd', sxSd, 'fks', fks, 'As', As)
# Table 3-3
def table_3_3(chk):
psi = {'Axial stress': 0 if Se == 0 else (1+alpha) / (1+A/(Se*t)),
'Torsion and shear stress': 5.54+1.82*math.pow(l/s, 4/3) * math.pow(alpha, 1/3),
'Lateral Pressure': 2*(1+math.sqrt(1+alpha))} # ψ
epsilon = {'Axial stress': 0.702*Zl,
'Torsion and shear stress': 0.856*math.pow(Zl, 3/4),
'Lateral Pressure': 1.04*math.sqrt(Zl)} # ξ
rho = {'Axial stress': 0.5,
'Torsion and shear stress': 0.6,
'Lateral Pressure': 0.6}
return psi[chk], epsilon[chk], rho[chk]
vals = list()
for chk in ['Axial stress', 'Torsion and shear stress','Lateral Pressure']:
psi, epsilon, rho = table_3_3(chk=chk)
C = 0 if psi == 0 else psi * math.sqrt(1 + math.pow(rho * epsilon / psi, 2)) # (3.4.2) (3.6.4)
fE = C * ((math.pow(math.pi, 2) * E) / (12 * (1 - math.pow(v, 2)))) * math.pow(t / l,2)
vals.append(fE)
#print(chk, 'C', C, 'psi', psi,'epsilon', epsilon,'rho' ,rho, 'fE', fE)
fEax, fEtors, fElat = vals
#Torsional Buckling can be excluded as possible failure if:
if self._LongStf._stiffener_type == 'FB':
chk_fb = hw <= 0.4*tw*math.sqrt(E/fy)
data_col_buc = column_buckling_data
fy_used = fy if data_col_buc['lambda_T'] <= 0.6 else data_col_buc['fT']
sasd = sasd*(A+s*t)/(A+Se*t) if A+Se*t>0 else 0
smsd = smsd * (A + s * t) / (A + Se * t) if A + Se * t > 0 else 0
sa0sd = -sasd if sasd < 0 else 0
sm0sd = -smsd if smsd < 0 else 0
sh0sd = -shsd if shsd < 0 else 0
#print('fy_used', fy_used,'sasd', sasd,'shsd', shsd, 'tsd', tsd)
sjsd_panels = math.sqrt(math.pow(sasd+smsd,2)-(sasd+smsd)*shsd + math.pow(shsd,2)+ 3*math.pow(tsd,2))
worst_axial_comb = min(sasd-smsd,sasd+smsd)
sjsd_shells = math.sqrt(math.pow(worst_axial_comb,2)-worst_axial_comb*shsd +math.pow(shsd,2)+3*math.pow(tsd,2))
sxsd_used = worst_axial_comb
provide_data['sxsd_used'] = sxsd_used
sjsd_used = sjsd_panels if self._geometry in [2,6] else sjsd_shells
provide_data['sjsd_used'] = sjsd_used
lambda_s2_panel = fy_used/sjsd_panels*((sa0sd+sm0sd)/fEax+sh0sd/fElat+tsd/fEtors) if\
sjsd_panels*fEax*fEtors*fElat>0 else 0
lambda_s2_shell = fy_used/sjsd_shells*(max(0,-worst_axial_comb)/fEax+sh0sd/fElat+tsd/fEtors) if\
sjsd_shells*fEax*fEtors*fElat>0 else 0
shell_type = 2 if self._geometry in [1,5] else 1
lambda_s = math.sqrt(lambda_s2_panel) if shell_type == 1 else math.sqrt(lambda_s2_shell)
fks = fy_used/math.sqrt(1+math.pow(lambda_s,4))
#print('tsd',tsd, 'sasd', sasd, 'sjsd panels', sjsd_panels, 'fy_used', fy_used, 'lambda_T',data_col_buc['lambda_T'] )
if lambda_s < 0.5:
gammaM = self._mat_factor
else:
if self._mat_factor == 1.1:
if lambda_s > 1:
gammaM = 1.4
else:
gammaM = 0.8+0.6*lambda_s
elif self._mat_factor == 1.15:
if lambda_s > 1:
gammaM = 1.45
else:
gammaM = 0.85+0.6*lambda_s
else:
if lambda_s > 1:
gammaM = 1.45 * (self._mat_factor/1.15)
else:
gammaM = 0.85+0.6*lambda_s * (self._mat_factor/1.15)
if self._uls_or_als == 'ALS':
gammaM = gammaM/self._mat_factor
# Design buckling strength:
fksd = fks/gammaM
provide_data['fksd'] = fksd
# print('fksd', fksd, 'fks', fks, 'gammaM', gammaM, 'lambda_s', lambda_s, 'lambda_s^2 panel',
# lambda_s2_panel, 'sjsd', sjsd_used, 'worst_axial_comb',worst_axial_comb, 'sm0sd',sm0sd)
#print(' ')
return provide_data
@staticmethod
def get_Itot(hw, tw, b, tf, r, s, t):
h = t+hw+tf
As = hw*tw + b*tf # checked
if As != 0:
num_stf = math.floor(2*math.pi*r/s)
            e = (hw*tw*(hw/2) + b*tf*(hw+tf/2)) / (hw*tw+b*tf)  # centroid of the stiffener over its total area Aw + Af
Istf = h*math.pow(tw,3)/12 + tf*math.pow(b, 3)/12
dist_stf = r - t / 2 - e
Istf_tot = 0
angle = 0
for stf_no in range(num_stf):
Istf_tot += Istf + As*math.pow(dist_stf*math.cos(angle),2)
angle += 2*math.pi/num_stf
else:
Istf_tot = 0
Ishell = (math.pi/4) * ( math.pow(r+t/2,4) - math.pow(r-t/2,4))
Itot = Ishell + Istf_tot # Checked
return Itot
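        # Illustrative use (hypothetical numbers): for a shell of r = 10000 mm, t = 20 mm with
        # 420x20 flat-bar longitudinals at s = 700 mm,
        # CylinderAndCurvedPlate.get_Itot(hw=420, tw=20, b=0, tf=0, r=10000, s=700, t=20)
        # gives the shell ring inertia plus the Steiner contributions of each stiffener, in mm^4.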
def column_buckling(self,shell_bukcling_data = None, unstf_shell_data = None):
geometry = self._geometry
provide_data = dict()
G = 80769.2
if self._LongStf is None:
h = self._Shell.thk*1000
else:
h = self._Shell.thk*1000 + self._LongStf.hw + self._LongStf.tf
hw = 0 if self._LongStf is None else self._LongStf.hw
tw = 0 if self._LongStf is None else self._LongStf.tw
b = 0 if self._LongStf is None else self._LongStf.b
tf = 0 if self._LongStf is None else self._LongStf.tf
E = self._E/1e6
t = self._Shell.thk*1000
s = max([self._Shell.dist_between_rings, 2*math.pi*self._Shell.radius])*1000 if self._LongStf == None else \
self._LongStf.s
v = self._v
r = self._Shell.radius*1000
l = self._Shell.dist_between_rings * 1000
fy = self._yield/1e6
L = self._Shell.tot_cyl_length*1000
LH = L
Lc = max([L, LH])
sasd = self._sasd/1e6
smsd = self._smsd/1e6
tsd = abs(self._tTsd/1e6 + self._tQsd/1e6)
psd = self._psd/1e6
shsd = psd * r / t
        shell_buckling_data = self.shell_buckling() if shell_bukcling_data is None else shell_bukcling_data  # shell_buckling() takes no arguments
data = self.unstiffened_shell() if unstf_shell_data is None else unstf_shell_data
idx = 1
param_map = {'Ring Stiff.': 0,'Ring Girder': 1}
fT_dict = dict()
for key, obj in {'Longitudinal stiff.': self._LongStf, 'Ring Stiff.': self._RingStf,
'Ring Girder': self._RingFrame}.items():
if obj is None:
idx += 1
continue
gammaM = data['gammaM circular cylinder'] if self._geometry > 2 else \
data['gammaM curved panel']
sjsd = shell_buckling_data['sjsd'][idx-1]
this_s = 0 if self._LongStf is None else self._LongStf.s
if any([self._geometry in [1, 5], this_s > (self._Shell.dist_between_rings * 1000)]):
fksd = data['fksd - Unstifffed circular cylinders']
else:
fksd = data['fksd - Unstifffed curved panel']
fks = fksd * gammaM
eta = sjsd/fks
hw = obj.hw
tw = obj.tw
if key == 'Longitudinal stiff.':
s_or_leo = obj.s
lT = l
else:
s_or_leo = shell_buckling_data['parameters'][param_map[key]][2]
lT = math.pi*math.sqrt(r*hw)
C = hw/s_or_leo*math.pow(t/tw,3)*math.sqrt(1-min([1,eta])) if s_or_leo*tw>0 else 0
beta = (3*C+0.2)/(C+0.2)
#parameters.append([alpha, beta, leo, zeta])
hs, It, Iz, Ipo, Iy = shell_buckling_data['cross section data'][idx - 1]
if obj.get_stiffener_type() == 'FB':
Af = obj.tf * obj.b
Aw = obj.hw * obj.tw
fEt = beta * (Aw + math.pow(obj.tf / obj.tw, 2) * Af) / (Aw + 3 * Af) * G * math.pow(obj.tw / hw,
2) + math.pow(
math.pi, 2) \
* E * Iz / ((Aw / 3 + Af) * math.pow(lT, 2))
else:
hs, It, Iz, Ipo, Iy = shell_buckling_data['cross section data'][idx-1]
fEt = beta * G * It / Ipo + math.pow(math.pi, 2) * E * math.pow(hs, 2) * Iz / (Ipo * math.pow(lT, 2))
lambdaT = math.sqrt(fy/fEt)
mu = 0.35*(lambdaT-0.6)
fT = (1+mu+math.pow(lambdaT,2)-math.sqrt(math.pow(1+mu+math.pow(lambdaT,2),2)-4*math.pow(lambdaT,2)))\
/(2*math.pow(lambdaT,2))*fy if lambdaT > 0.6 else fy
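            # Torsional buckling reduction: with lambda_T = sqrt(fy/fET) and mu = 0.35*(lambda_T - 0.6),
            #   fT = fy * (1 + mu + lambda_T**2 - sqrt((1 + mu + lambda_T**2)**2 - 4*lambda_T**2)) / (2*lambda_T**2)
            # for lambda_T > 0.6, otherwise fT = fy (no reduction).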
# General
if key == 'Longitudinal stiff.':
#print('Column buckling', 'fET', fEt, 'mu', mu, 'lambdaT', lambdaT, 'hs', hs, 'It', It, 'Iz', Iz ,'Ipo', Ipo)
provide_data['lambda_T'] = lambdaT
provide_data['fT'] = fT
fT_dict[key] = fT
idx += 1
# if key == 'Ring Stiff.':
# print(hs, It, Iz, Ipo, Iy)
# print('hello')
provide_data['fT_dict'] = fT_dict
# Moment of inertia
As = A = hw*tw + b*tf # checked
num_stf = math.floor(2*math.pi*r/s)
Atot = As*num_stf + 2*math.pi*r*t
        e = 0 if As == 0 else (hw*tw*(hw/2) + b*tf*(hw+tf/2)) / (hw*tw + b*tf)  # stiffener centroid above the plate (0 when no longitudinal stiffener)
Istf = h*math.pow(tw,3)/12 + tf*math.pow(b, 3)/12
dist_stf = r - t / 2 - e
Istf_tot = 0
angle = 0
for stf_no in range(num_stf):
Istf_tot += Istf + As*math.pow(dist_stf*math.cos(angle),2)
angle += 2*math.pi/num_stf
Ishell = (math.pi/4) * ( math.pow(r+t/2,4) - math.pow(r-t/2,4))
Itot = Ishell + Istf_tot # Checked
k_factor = self._Shell.k_factor
        col_test = math.pow(k_factor*Lc/math.sqrt(Itot/Atot), 2) >= 2.5*E/fy
provide_data['Need to check column buckling'] = col_test
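        # Column buckling needs to be assessed when (k*Lc/i)**2 >= 2.5*E/fy,
        # with radius of gyration i = sqrt(Itot/Atot) as used in col_test above.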
# print("Column buckling should be assessed") if col_test else \
# print("Column buckling does not need to be checked")
#Sec. 3.8.2 Column buckling strength:
fEa = data['fEax - Unstifffed circular cylinders']
#fEa = any([geometry in [1,5], s > l])
fEh = data['fEh - Unstifffed circular cylinders - Psi=4']
# Special case: calculation of fak for unstiffened shell:
# General case:
use_fac = 1 if geometry < 3 else 2
if use_fac == 1:
a = 1 + math.pow(fy, 2) / math.pow(fEa, 2)
b = ((2 * math.pow(fy, 2) / (fEa * fEh)) - 1) * shsd
c = math.pow(shsd, 2) + math.pow(fy, 2) * math.pow(shsd, 2) / math.pow(fEh, 2) - math.pow(fy, 2)
fak = 0 if b == 0 else (b + math.sqrt(math.pow(b, 2) - 4 * a * c)) / (2 * a)
elif any([geometry in [1,5], s > l]):
fak = data['max axial stress - 3.4.2 Shell buckling']
else:
fak = data['max axial stress - 3.3 Unstifffed curved panel']
i = Itot/Atot
        # Euler column buckling stress, fE = pi**2*E*(I/A)/(k*Lc)**2, with i = Itot/Atot from above
        fE = 0.0001 if Lc*k_factor == 0 else math.pow(math.pi, 2)*E*i / math.pow(Lc*k_factor, 2)
Lambda_ = 0 if fE == 0 else math.sqrt(fak/fE)
        fkc = (1 - 0.28*math.pow(Lambda_, 2))*fak if Lambda_ <= 1.34 else fak/math.pow(Lambda_, 2)
gammaM = data['gammaM curved panel'] #self._mat_factor # Check
fakd = fak/gammaM
fkcd = fkc/gammaM
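        # Stability interaction check below: sa0sd/fkcd + (|smsd| / (1 - sa0sd/fE)) / fakd <= 1,
        # where sa0sd is the axial compression magnitude (taken as 0 when sasd is tensile).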
sa0sd = -sasd if sasd<0 else 0
if fakd*fkcd > 0:
stab_chk = sa0sd/fkcd + (abs(smsd) / (1-sa0sd/fE))/fakd <= 1
else:
stab_chk = True
#print("Stability requirement satisfied") if stab_chk else print("Not acceptable")
# Sec. 3.9 Torsional buckling: moved to the top
# Stiffener check
stf_req_h = list()
for idx, obj in enumerate([self._LongStf, self._RingStf, self._RingFrame]):
if obj is None:
stf_req_h.append(np.nan)
else:
stf_req_h.append(0.4*obj.tw*math.sqrt(E/fy) if obj.get_stiffener_type() == 'FB'
else 1.35*obj.tw*math.sqrt(E/fy))
stf_req_h = np.array(stf_req_h)
stf_req_b = list()
for idx, obj in enumerate([self._LongStf, self._RingStf, self._RingFrame]):
if obj is None:
stf_req_b.append(np.nan)
else:
stf_req_b.append(np.nan if obj.get_stiffener_type() == 'FB' else 0.4*obj.tf*math.sqrt(E/fy))
bf = list()
for idx, obj in enumerate([self._LongStf, self._RingStf, self._RingFrame]):
if obj is None:
bf.append(np.nan)
elif obj.get_stiffener_type() == 'FB':
bf.append(obj.b)
elif obj.get_stiffener_type() == 'T':
bf.append((obj.b-obj.tw)/2)
else:
bf.append(obj.b-obj.tw)
bf = np.array(bf)
hw_div_tw = list()
for idx, obj in enumerate([self._RingStf, self._RingFrame]):
if obj is None:
hw_div_tw.append(np.nan)
else:
hw_div_tw.append(obj.hw/obj.tw)
hw_div_tw = np.array(hw_div_tw)
#parameters - [alpha, beta, leo, zeta, rf, r0, zt]
req_hw_div_tw = list()
for idx, obj in enumerate([self._RingStf, self._RingFrame]):
if obj is None:
req_hw_div_tw.append(np.nan)
else:
to_append = np.nan if obj.b*obj.tf == 0 else 2/3*math.sqrt(shell_buckling_data['parameters'][idx][4]
*(obj.tw*obj.hw)*E/
(obj.hw*obj.b*obj.tf*fy))
req_hw_div_tw.append(to_append)
req_hw_div_tw = np.array(req_hw_div_tw)
ef_div_tw = list()
for idx, obj in enumerate([self._RingStf, self._RingFrame]):
if obj is None:
ef_div_tw.append(np.nan)
else:
ef_div_tw.append(obj.get_flange_eccentricity())
ef_div_tw = np.array(ef_div_tw)
ef_div_tw_req = list()
for idx, obj in enumerate([self._RingStf, self._RingFrame]):
if obj is None:
ef_div_tw_req.append(np.nan)
else:
ef_div_tw_req.append(np.nan if obj.b*obj.tf == 0 else
1/3*shell_buckling_data['parameters'][idx][4]/obj.hw*obj.hw*obj.tw/(obj.b*obj.tf))
ef_div_tw_req = np.array(ef_div_tw_req)
#
# print(stf_req_h , '>', np.array([np.nan if self._LongStf is None else self._LongStf.hw,
# np.nan if self._RingStf is None else self._RingStf.hw,
# np.nan if self._RingFrame is None else self._RingFrame.hw]))
# print(stf_req_b , '>', bf)
# print(hw_div_tw , '<', req_hw_div_tw)
# print(ef_div_tw , '<', ef_div_tw_req)
chk1 = stf_req_h>np.array([np.nan if self._LongStf is None else self._LongStf.hw,
np.nan if self._RingStf is None else self._RingStf.hw,
np.nan if self._RingFrame is None else self._RingFrame.hw])
chk1 = [np.nan if np.isnan(val) else chk1[idx] for idx, val in enumerate(stf_req_h)]
chk2 = stf_req_b > bf
chk2 = [np.nan if np.isnan(val) else chk2[idx] for idx, val in enumerate(stf_req_b)]
chk3= hw_div_tw < req_hw_div_tw
chk3 = [np.nan if np.isnan(val) else chk3[idx] for idx, val in enumerate(req_hw_div_tw)]
chk4 = ef_div_tw < ef_div_tw_req
chk4 = [np.nan if np.isnan(val) else chk4[idx] for idx, val in enumerate(ef_div_tw_req)]
provide_data['stiffener check'] = {'longitudinal':all([chk1[0], chk2[0]]),
'ring stiffener': None if self._RingStf is None else
all([chk1[1],chk2[1],chk3[0],chk4[0]]),
'ring frame': None if self._RingFrame is None else
True}
#all([chk1[2],chk2[2],chk3[1],chk4[1]])} SKIP check for girders
provide_data['stiffener check detailed'] = {'longitudinal':'Web height < ' + str(round(stf_req_h[0],1)) if not chk1[0]
else '' + ' ' + 'flange width < ' +str(round(stf_req_b[0],1)) if not chk2[0] else ' ',
'ring stiffener': None if self._RingStf is None
else 'Web height < ' + str(round(stf_req_h[1],1)) if not chk1[1]
else '' + ' ' + 'flange width < ' +str(round(stf_req_b[1],1)) if not chk2[1]
else ' ' + ' ' + 'hw/tw >= ' + str(round(req_hw_div_tw[0],1))
if not chk3[0]
else ''+ ' ' + 'ef/tw >= ' + str(round(ef_div_tw_req[0],1))
if not chk4[0]
else '',
'ring frame': None if self._RingFrame is None
else 'Web height < ' + str(round(stf_req_h[2],1)) if not chk1[2]
else '' + ' ' + 'flange width < ' +str(round(stf_req_b[2],1)) if not chk2[2]
else ' ' + ' ' + 'hw/tw >= ' + str(round(req_hw_div_tw[1],1))
if not chk3[1]
else ''+ ' ' + 'ef/tw >= ' + str(round(ef_div_tw_req[1],1))
if not chk4[1]
else ''}
provide_data['Column stability check'] = stab_chk
return provide_data
def get_all_properties(self):
all_data = {'Main class': self.get_main_properties(),
'Shell': self._Shell.get_main_properties(),
'Long. stf.': None if self._LongStf is None else self._LongStf.get_structure_prop(),
'Ring stf.': None if self._RingStf is None else self.RingStfObj.get_structure_prop(),
'Ring frame': None if self._RingFrame is None else self._RingFrame.get_structure_prop()}
return all_data
def set_all_properties(self, all_prop_dict): # TODO ensure that this is set when optimizing and saving.
all_data = {'Main class': self.set_main_properties(all_prop_dict['Main class']),
'Shell': self._Shell.set_main_properties(all_prop_dict['Shell']),
'Long. stf.': None if self._LongStf is None else
self._LongStf.set_main_properties(all_prop_dict['Long. stf.']),
'Ring stf.': None if self._RingStf is None else
self.RingStfObj.set_main_properties(all_prop_dict['Ring stf.']),
'Ring frame': None if self._RingFrame is None else
self._RingFrame.set_main_properties(all_prop_dict['Ring frame'])}
return all_data
def get_main_properties(self):
main_dict = {'sasd': [self._sasd, 'Pa'],
'smsd': [self._smsd, 'Pa'],
'tTsd': [abs(self._tTsd), 'Pa'],
'tQsd': [self._tQsd, 'Pa'],
'psd': [self._psd, 'Pa'],
'shsd': [self._shsd, 'Pa'],
'geometry': [self._geometry, ''],
'material factor': [self._mat_factor, ''],
'delta0': [self._delta0, ''],
'fab method ring stf': [self._fab_method_ring_stf, '-'],
'fab method ring girder': [self._fab_method_ring_girder, '-'],
'E-module': [self._E, 'Pa'],
'poisson': [self._v, '-'],
'mat_yield': [self._yield, 'Pa'],
'length between girders': [self._length_between_girders, 'm'],
'panel spacing, s': [self._panel_spacing, 'm'],
'ring stf excluded': [self.__ring_stiffener_excluded, ''],
'ring frame excluded': [self.__ring_frame_excluded, ''],
'end cap pressure': [self._end_cap_pressure_included, ''],
'ULS or ALS':[self._uls_or_als, '']}
return main_dict
def set_stresses_and_pressure(self, val):
self._sasd = val['sasd']
self._smsd = val['smsd']
self._tTsd = abs(val['tTsd'])
self._tQsd= val['tQsd']
self._psd = val['psd']
self._shsd = val['shsd']
def get_x_opt(self):
        '''
        Returns the design variables as a list [shell, long. stf., ring stf., ring frame], e.g.:
            shell (0.02, 2.5, 5, 5, 10, nan, nan, nan)
            long  (0.875, nan, 0.3, 0.01, 0.1, 0.01, nan, stiffener_type)
            ring  (nan, nan, 0.3, 0.01, 0.1, 0.01, nan, stiffener_type)
            ring  (nan, nan, 0.7, 0.02, 0.2, 0.02, nan, stiffener_type)
        Stiffener entries follow the order of
            (self._spacing, self._plate_th, self._web_height, self._web_th, self._flange_width,
             self._flange_th, self._span, self._girder_lg, self._stiffener_type)
        '''
shell = [self._Shell.thk, self._Shell.radius, self._Shell.dist_between_rings, self._Shell.length_of_shell,
self._Shell.tot_cyl_length, np.nan, np.nan, np.nan]
if self._LongStf is not None:
long = [self._LongStf.s/1000, np.nan, self._LongStf.hw/1000, self._LongStf.tw/1000, self._LongStf.b/1000,
self._LongStf.tf/1000, np.nan, self._LongStf.stiffener_type]
else:
long = [0 for dummy in range(8)]
if self._RingStf is not None:
ring_stf = [self._RingStf.s/1000, np.nan, self._RingStf.hw/1000, self._RingStf.tw/1000, self._RingStf.b/1000,
self._RingStf.tf/1000, np.nan, self._RingStf.stiffener_type]
else:
ring_stf = [0 for dummy in range(8)]
if self._RingFrame is not None:
ring_fr = [self._RingFrame.s/1000, np.nan, self._RingFrame.hw/1000, self._RingFrame.tw/1000, self._RingFrame.b/1000,
self._RingFrame.tf/1000, np.nan, self._RingFrame.stiffener_type]
else:
ring_fr = [0 for dummy in range(8)]
return [shell, long, ring_stf, ring_fr]
class CalcFatigue(Structure):
'''
    This class does the fatigue calculations for the plate fields.
    Input is a structure object (getters from the Structure class) and a fatigue dictionary.
'''
def __init__(self, main_dict: dict, fatigue_dict: dict=None):
super(CalcFatigue, self).__init__(main_dict, fatigue_dict)
if fatigue_dict is not None:
self._sn_curve = fatigue_dict['SN-curve']
self._acc = fatigue_dict['Accelerations']
self._weibull = fatigue_dict['Weibull']
self._period = fatigue_dict['Period']
self._k_factor = fatigue_dict['SCF']
self._corr_loc = fatigue_dict['CorrLoc']
self._no_of_cycles = fatigue_dict['n0']
self._design_life = fatigue_dict['Design life']
self._fraction = fatigue_dict['Fraction']
self._case_order = fatigue_dict['Order']
try:
self._dff = fatigue_dict['DFF']
except KeyError:
self._dff = 2
self.fatigue_dict = fatigue_dict
def get_sn_curve(self):
return self._sn_curve
    def __get_sigma_ext(self, pressure):
        return -0.5*pressure*((self._spacing/(self._plate_th))**2) * (self._k_factor/1000**2)
    def __get_sigma_int(self, pressure):
        return 0.5*pressure*((self._spacing/(self._plate_th))**2) * (self._k_factor/1000**2)
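    # The two helpers above return local plate bending stresses of the form -/+ 0.5*p*(s/t)**2 * SCF;
    # k_factor acts as the stress concentration factor and the 1/1000**2 factor is a unit conversion.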
def __get_range(self, idx, int_press, ext_press):
return 2*math.sqrt(math.pow(self.__get_sigma_ext(ext_press), 2) +
math.pow(self.__get_sigma_int(int_press), 2) +
2*self._corr_loc[idx]*self.__get_sigma_ext(ext_press)
*self.__get_sigma_int(int_press))
def __get_stress_fraction(self,idx, int_press, ext_press):
return self.__get_range(idx, int_press, ext_press) / \
math.pow(math.log(self._no_of_cycles), 1/self._weibull[idx])
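    # Weibull scale parameter of the long-term stress-range distribution: q = delta_sigma / (ln n0)**(1/h)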
def __get_gamma1(self,idx):
return math.exp(gammaln(snc.get_paramter(self._sn_curve,'m1')/self._weibull[idx] + 1))
def __get_gamma2(self,idx):
return math.exp(gammaln(snc.get_paramter(self._sn_curve, 'm2') / self._weibull[idx] + 1))
def get_damage_slope1(self, idx, curve, int_press=0, ext_press=0):
m1, log_a1, k, slope = snc.get_paramter(curve,'m1'), snc.get_paramter(curve,'log a1'),\
snc.get_paramter(curve,'k'), snc.get_paramter(curve,'slope')
cycles = self._design_life*365*24*3600/self._period[idx]
thk_eff = math.log10(max(1,self._plate_th/0.025)) * k
slope_ch = math.exp( math.log( math.pow(10, log_a1-m1*thk_eff)/slope) / m1)
gamma1 = self.__get_gamma1(idx)
weibull = self._weibull[idx]
stress_frac = self.__get_stress_fraction(idx, int_press, ext_press)
# print('Internal pressure: ', int_press)
# print('External pressure: ', ext_press)
# finding GAMMADIST
if stress_frac == 0:
return 0
x, alpha = math.pow(slope_ch/stress_frac, weibull),1 + m1/weibull
gamma_val = gammadist.cdf(x,alpha)
return cycles / math.pow(10, log_a1-m1*thk_eff) * math.pow(stress_frac, m1)*gamma1*(1-gamma_val)\
*self._fraction[idx]
def get_damage_slope2(self, idx, curve, int_press, ext_press):
m2, log_m2, k, slope = snc.get_paramter(curve,'m2'), snc.get_paramter(curve,'log a2'),\
snc.get_paramter(curve,'k'), snc.get_paramter(curve,'slope')
cycles = self._design_life*365*24*3600/self._period[idx]
        thk_eff = math.log10(max(1, self._plate_th/0.025)) * k  # same 25 mm reference thickness correction as in get_damage_slope1
slope_ch = math.exp( math.log( math.pow(10, log_m2-m2*thk_eff)/slope) / m2)
gammm2 = self.__get_gamma2(idx)
weibull = self._weibull[idx]
stress_frac = self.__get_stress_fraction(idx, int_press, ext_press)
# finding GAMMADIST
if stress_frac == 0:
return 0
x, alpha = math.pow(slope_ch/stress_frac, weibull),1 + m2/weibull
gamma_val = gammadist.cdf(x,alpha)
return cycles / math.pow(10, log_m2-m2*thk_eff) * math.pow(stress_frac, m2)*gammm2*(gamma_val)\
*self._fraction[idx]
def get_total_damage(self, int_press=(0, 0, 0), ext_press=(0, 0, 0)):
damage = 0
for idx in range(3):
if self._fraction[idx] != 0 and self._period[idx] != 0:
damage += self.get_damage_slope1(idx,self._sn_curve, int_press[idx], ext_press[idx]) + \
self.get_damage_slope2(idx,self._sn_curve, int_press[idx], ext_press[idx])
return damage
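    # Minimal usage sketch (input dicts assumed to follow example_data.py, cf. the
    # commented-out example in main() below):
    #   fat = CalcFatigue(test.obj_dict, test.fat_obj_dict)
    #   damage = fat.get_total_damage(int_press=(0, 0, 0), ext_press=(50000, 60000, 0))
    #   acceptable = damage * fat.get_dff() <= 1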
def set_commmon_properties(self, fatigue_dict: dict):
        ''' Setting the common fatigue properties. '''
#self._sn_curve, self.fatigue_dict['SN-curve'] = fatigue_dict['SN-curve'], fatigue_dict['SN-curve']
self._acc, self.fatigue_dict['Accelerations'] = fatigue_dict['Accelerations'], fatigue_dict['Accelerations']
#self._weibull, self.fatigue_dict['Weibull'] = fatigue_dict['Weibull'], fatigue_dict['Weibull']
#self._period, self.fatigue_dict['Period'] = fatigue_dict['Period'], fatigue_dict['Period']
#self._k_factor, self.fatigue_dict['SCF'] = fatigue_dict['SCF'], fatigue_dict['SCF']
#self._corr_loc, self.fatigue_dict['CorrLoc'] = fatigue_dict['CorrLoc'], fatigue_dict['CorrLoc']
self._no_of_cycles, self.fatigue_dict['n0'] = fatigue_dict['n0'], fatigue_dict['n0']
self._design_life, self.fatigue_dict['Design life'] = fatigue_dict['Design life'], fatigue_dict['Design life']
self._fraction, self.fatigue_dict['Fraction'] = fatigue_dict['Fraction'], fatigue_dict['Fraction']
#self._case_order, self.fatigue_dict['Order'] = fatigue_dict['Order'], fatigue_dict['Order']
self._dff, self.fatigue_dict['DFF'] = fatigue_dict['DFF'], fatigue_dict['DFF']
def set_fatigue_properties(self, fatigue_dict: dict):
        ''' Setting the fatigue properties. '''
self._sn_curve, self.fatigue_dict['SN-curve'] = fatigue_dict['SN-curve'], fatigue_dict['SN-curve']
self._acc, self.fatigue_dict['Accelerations'] = fatigue_dict['Accelerations'], fatigue_dict['Accelerations']
self._weibull, self.fatigue_dict['Weibull'] = fatigue_dict['Weibull'], fatigue_dict['Weibull']
self._period, self.fatigue_dict['Period'] = fatigue_dict['Period'], fatigue_dict['Period']
self._k_factor, self.fatigue_dict['SCF'] = fatigue_dict['SCF'], fatigue_dict['SCF']
self._corr_loc, self.fatigue_dict['CorrLoc'] = fatigue_dict['CorrLoc'], fatigue_dict['CorrLoc']
self._no_of_cycles, self.fatigue_dict['n0'] = fatigue_dict['n0'], fatigue_dict['n0']
self._design_life, self.fatigue_dict['Design life'] = fatigue_dict['Design life'], fatigue_dict['Design life']
self._fraction, self.fatigue_dict['Fraction'] = fatigue_dict['Fraction'], fatigue_dict['Fraction']
self._case_order, self.fatigue_dict['Order'] = fatigue_dict['Order'], fatigue_dict['Order']
self._dff, self.fatigue_dict['DFF'] = fatigue_dict['DFF'], fatigue_dict['DFF']
def get_fatigue_properties(self):
''' Returning properties as a dictionary '''
return self.fatigue_dict
def get_accelerations(self):
        ''' Returning a tuple of accelerations. '''
return self._acc
def get_dff(self):
return self._dff
def get_design_life(self):
return self._design_life
class PULSpanel():
'''
    Takes care of PULS (panel ultimate limit state) runs through the Excel interface.
'''
    def __init__(self, run_dict: dict = None, puls_acceptance: float = 0.87, puls_sheet_location: str = None):
        super(PULSpanel, self).__init__()
        self._all_to_run = dict() if run_dict is None else run_dict
self._run_results = {}
self._puls_acceptance = puls_acceptance
self._puls_sheet_location = puls_sheet_location
self._all_uf = {'buckling': list(), 'ultimate': list()}
@property
def all_uf(self):
return self._all_uf
@property
def puls_acceptance(self):
return self._puls_acceptance
@puls_acceptance.setter
def puls_acceptance(self, val):
self._puls_acceptance = val
@property
def puls_sheet_location(self):
return self._puls_sheet_location
@puls_sheet_location.setter
def puls_sheet_location(self, val):
self._puls_sheet_location = val
def set_all_to_run(self, val):
self._all_to_run = val
def get_all_to_run(self):
return self._all_to_run
def get_run_results(self):
return self._run_results
def set_run_results(self, val):
self._run_results = val
for key in self._run_results.keys():
if any([key == 'sheet location',type(self._run_results[key]['Buckling strength']) != dict,
type(self._run_results[key]['Ultimate capacity']) != dict]):
continue
if all([type(self._run_results[key]['Buckling strength']['Actual usage Factor'][0]) == float,
type(self._run_results[key]['Ultimate capacity']['Actual usage Factor'][0]) == float]):
self._all_uf['buckling'].append(self._run_results[key]['Buckling strength']['Actual usage Factor'][0])
self._all_uf['ultimate'].append(self._run_results[key]['Ultimate capacity']['Actual usage Factor'][0])
self._all_uf['buckling'] = np.unique(self._all_uf['buckling']).tolist()
self._all_uf['ultimate'] = np.unique(self._all_uf['ultimate']).tolist()
def run_all(self, store_results = False):
'''
        Returns the following results:
Identification: name of line/run
Plate geometry: dict_keys(['Length of panel', 'Stiffener spacing', 'Plate thick.'])
Primary stiffeners: dict_keys(['Number of stiffeners', 'Stiffener type', 'Stiffener boundary', 'Stiff. Height',
'Web thick.', 'Flange width', 'Flange thick.', 'Flange ecc.', 'Tilt angle'])
Secondary stiffeners. dict_keys(['Number of sec. stiffeners', 'Secondary stiffener type', 'Stiffener boundary',
'Stiff. Height', 'Web thick.', 'Flange width', 'Flange thick.'])
Model imperfections. dict_keys(['Imp. level', 'Plate', 'Stiffener', 'Stiffener tilt'])
Material: dict_keys(['Modulus of elasticity', "Poisson's ratio", 'Yield stress plate', 'Yield stress stiffener'])
Aluminium prop: dict_keys(['HAZ pattern', 'HAZ red. factor'])
Applied loads: dict_keys(['Axial stress', 'Trans. stress', 'Shear stress', 'Pressure (fixed)'])
Bound cond.: dict_keys(['In-plane support'])
Global elastic buckling: dict_keys(['Axial stress', 'Trans. Stress', 'Trans. stress', 'Shear stress'])
Local elastic buckling: dict_keys(['Axial stress', 'Trans. Stress', 'Trans. stress', 'Shear stress'])
Ultimate capacity: dict_keys(['Actual usage Factor', 'Allowable usage factor', 'Status'])
Failure modes: dict_keys(['Plate buckling', 'Global stiffener buckling', 'Torsional stiffener buckling',
'Web stiffener buckling'])
Buckling strength: dict_keys(['Actual usage Factor', 'Allowable usage factor', 'Status'])
Local geom req (PULS validity limits): dict_keys(['Plate slenderness', 'Web slend', 'Web flange ratio',
'Flange slend ', 'Aspect ratio'])
CSR-Tank requirements (primary stiffeners): dict_keys(['Plating', 'Web', 'Web-flange', 'Flange', 'stiffness'])
:return:
'''
import excel_inteface as pulsxl
iterator = self._all_to_run
newfile = self._puls_sheet_location
my_puls = pulsxl.PulsExcel(newfile, visible=False)
#my_puls.set_multiple_rows(20, iterator)
run_sp, run_up = my_puls.set_multiple_rows_batch(iterator)
my_puls.calculate_panels(sp=run_sp, up=run_up)
#all_results = my_puls.get_all_results()
all_results = my_puls.get_all_results_batch(sp = run_sp, up=run_up)
for id, data in all_results.items():
self._run_results[id] = data
my_puls.close_book(save=False)
self._all_uf = {'buckling': list(), 'ultimate': list()}
for key in self._run_results.keys():
try:
if all([type(self._run_results[key]['Buckling strength']['Actual usage Factor'][0]) == float,
type(self._run_results[key]['Ultimate capacity']['Actual usage Factor'][0]) == float]):
self._all_uf['buckling'].append(self._run_results[key]['Buckling strength']
['Actual usage Factor'][0])
self._all_uf['ultimate'].append(self._run_results[key]['Ultimate capacity']
['Actual usage Factor'][0])
except TypeError:
print('Got a type error. Life will go on. Key for PULS run results was', key)
print(self._run_results[key])
self._all_uf['buckling'] = np.unique(self._all_uf['buckling']).tolist()
self._all_uf['ultimate'] = np.unique(self._all_uf['ultimate']).tolist()
if store_results:
            store_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'PULS', 'Result storage')
            with open(os.path.join(store_path, datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + '_UP.json'), 'w') as file:
file.write(json.dumps(all_results, ensure_ascii=False))
return all_results
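    # Minimal usage sketch (run dict and sheet path are assumptions, cf. the commented-out
    # examples in main() below):
    #   panel = PULSpanel(run_dict=ex.run_dict, puls_sheet_location=r'<path to PulsExcel_new.xlsm>')
    #   results = panel.run_all(store_results=False)
    #   uf = panel.get_utilization(line='line3', method='buckling')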
def get_utilization(self, line, method, acceptance = 0.87):
if line in self._run_results.keys():
if method == 'buckling':
if type(self._run_results[line]['Buckling strength']['Actual usage Factor'][0]) == str or \
self._run_results[line]['Buckling strength']['Actual usage Factor'][0] is None:
return None
return self._run_results[line]['Buckling strength']['Actual usage Factor'][0]/acceptance
else:
if type(self._run_results[line]['Ultimate capacity']['Actual usage Factor'][0]) == str or \
                        self._run_results[line]['Ultimate capacity']['Actual usage Factor'][0] is None:
return None
return self._run_results[line]['Ultimate capacity']['Actual usage Factor'][0]/acceptance
else:
return None
def get_puls_line_results(self, line):
if line not in self._run_results.keys():
return None
else:
return self._run_results[line]
def get_string(self, line, uf = 0.87):
'''
        :param line: line/run identifier used as key in the stored PULS results
        :return: a formatted multi-line summary string for the given line
'''
results = self._run_results[line]
loc_geom = 'Ok' if all([val[0] == 'Ok' for val in results['Local geom req (PULS validity limits)']
.values()]) else 'Not ok'
csr_geom = 'Ok' if all([val[0] == 'Ok' for val in results['CSR-Tank requirements (primary stiffeners)']
.values()]) else 'Not ok'
ret_str = 'PULS results\n\n' +\
'Ultimate capacity usage factor: ' + str(results['Ultimate capacity']['Actual usage Factor'][0]/uf)+'\n'+\
'Buckling strength usage factor: ' + str(results['Buckling strength']['Actual usage Factor'][0]/uf)+'\n'+\
'Local geom req (PULS validity limits): ' + loc_geom + '\n'+\
'CSR-Tank requirements (primary stiffeners): ' + csr_geom
return ret_str
def result_changed(self, id):
if id in self._run_results.keys():
self._run_results.pop(id)
def generate_random_results(self, batch_size: int = 1000, stf_type: str = None):
'''
        Generate random results based on user input.
:return:
'''
'''
Running iterator:
run_dict_one = {'line3': {'Identification': 'line3', 'Length of panel': 4000.0, 'Stiffener spacing': 700.0,
'Plate thickness': 18.0, 'Number of primary stiffeners': 10, 'Stiffener type (L,T,F)': 'T',
'Stiffener boundary': 'C', 'Stiff. Height': 400.0, 'Web thick.': 12.0, 'Flange width': 200.0,
'Flange thick.': 20.0, 'Tilt angle': 0, 'Number of sec. stiffeners': 0,
'Modulus of elasticity': 210000.0, "Poisson's ratio": 0.3, 'Yield stress plate': 355.0,
'Yield stress stiffener': 355.0, 'Axial stress': 101.7, 'Trans. stress 1': 100.0,
'Trans. stress 2': 100.0, 'Shear stress': 5.0, 'Pressure (fixed)': 0.41261,
'In-plane support': 'Int'}}
'''
run_dict = {}
profiles = hlp.helper_read_section_file('bulb_anglebar_tbar_flatbar.csv')
if stf_type is not None:
new_profiles = list()
for stf in profiles:
if stf['stf_type'][0] == stf_type:
new_profiles.append(stf)
profiles = new_profiles
lengths = np.arange(2000,6000,100)
spacings = np.arange(500,900,10)
thks = np.arange(10,25,1)
        axstress = trans_stress_range_1 = trans_stress_range_2 = shearstress = np.arange(-200,210,10) #np.concatenate((np.arange(-400,-200,10), np.arange(210,410,10)))
pressures = np.arange(0,0.45,0.01)
now = time.time()
yields = np.array([235,265,315,355,355,355,355,390,420,460])
for idx in range(batch_size):
''' Adding 'Stiffener type (L,T,F)': self.stf_type, 'Stiffener boundary': 'C',
'Stiff. Height': self.stf_web_height*1000, 'Web thick.': self.stf_web_thk*1000,
'Flange width': self.stf_flange_width*1000, 'Flange thick.': self.stf_flange_thk*1000}'''
this_id = 'run_' + str(idx) + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
this_stf = random.choice(profiles)
if random.choice([True, False]):
boundary = 'Int'
else:
boundary = random.choice(['GL', 'GT'])
if random.choice([True, True, True, False]):
stf_boundary = 'C'
else:
stf_boundary = 'S'
#boundary = 'Int'
#stf_boundary = 'C'
yieldstress = np.random.choice(yields)
            if random.choice([True, True, True, False]):
                transstress1 = np.random.choice(trans_stress_range_1)  # Using same value for trans1 and trans2
                transstress2 = transstress1
            else:
                transstress1 = np.random.choice(trans_stress_range_1)
                transstress2 = np.random.choice(trans_stress_range_2)
# run_dict[this_id] = {'Identification': this_id, 'Length of panel': np.random.choice(lengths),
# 'Stiffener spacing': np.random.choice(spacings),
# 'Plate thickness': np.random.choice(thks), 'Number of primary stiffeners': 10,
# 'Stiffener type (L,T,F)': 'F' if this_stf['stf_type'][0] == 'FB' else this_stf['stf_type'][0],
# 'Stiffener boundary': stf_boundary,
# 'Stiff. Height': this_stf['stf_web_height'][0]*1000,
# 'Web thick.': this_stf['stf_web_thk'][0]*1000,
# 'Flange width': 0 if this_stf['stf_type'][0] == 'F'
# else this_stf['stf_flange_width'][0]*1000,
# 'Flange thick.': 0 if this_stf['stf_type'][0] == 'F'
# else this_stf['stf_flange_thk'][0]*1000,
# 'Tilt angle': 0, 'Number of sec. stiffeners': 0,
# 'Modulus of elasticity': 210000, "Poisson's ratio": 0.3,
# 'Yield stress plate':yieldstress, 'Yield stress stiffener': yieldstress,
# 'Axial stress': 0 if boundary == 'GT' else np.random.choice(axstress),
# 'Trans. stress 1': 0 if boundary == 'GL' else transstress1,
# 'Trans. stress 2': 0 if boundary == 'GL' else transstress2,
# 'Shear stress': np.random.choice(shearstress),
# 'Pressure (fixed)': 0 if stf_boundary == 'S' else np.random.choice(pressures),
# 'In-plane support': boundary, 'sp or up': 'SP'}
same_ax = np.random.choice(axstress)
lengths = np.arange(100, 6000, 100)
spacings = np.arange(100, 26000, 100)
thks = np.arange(10, 50, 1)
boundary = random.choice(['GL', 'GT'])
if np.random.choice([True,False,False,False]):
support = ['SS','SS','SS','SS']
elif np.random.choice([True,False,False,False]):
support = ['CL','CL','CL','CL']
else:
support = [np.random.choice(['SS', 'CL']),np.random.choice(['SS', 'CL']),
np.random.choice(['SS', 'CL']),np.random.choice(['SS', 'CL'])]
if np.random.choice([True,False]):
press = 0
else:
press = np.random.choice(pressures)
run_dict[this_id] = {'Identification': this_id, 'Length of plate': np.random.choice(lengths),
'Width of c': np.random.choice(spacings),
'Plate thickness': np.random.choice(thks),
'Modulus of elasticity': 210000, "Poisson's ratio": 0.3,
'Yield stress plate':yieldstress,
'Axial stress 1': 0 if boundary == 'GT' else same_ax,
'Axial stress 2': 0 if boundary == 'GT' else same_ax,
'Trans. stress 1': 0 if boundary == 'GL' else transstress1,
'Trans. stress 2': 0 if boundary == 'GL' else transstress2,
'Shear stress': np.random.choice(shearstress), 'Pressure (fixed)': press,
'In-plane support': boundary,
'Rot left': support[0], 'Rot right': support[1],
'Rot upper': support[2], 'Rot lower': support[3],
'sp or up': 'UP'}
self._all_to_run = run_dict
self.run_all(store_results=True)
print('Time to run', batch_size, 'batches:', time.time() - now)
def main():
import example_data as ex
# PULS = PULSpanel(ex.run_dict, puls_sheet_location=r'C:\Github\ANYstructure\ANYstructure\PULS\PulsExcel_new - Copy (1).xlsm')
# PULS.run_all_multi()
# PULS = PULSpanel(puls_sheet_location=r'C:\Github\ANYstructure\PULS\PulsExcel_new - generator.xlsm')
# for dummy in range(100):
# PULS.generate_random_results(batch_size=10000)
# import example_data as test
# from multiprocessing import Process
#
# queue = multiprocessing.SimpleQueue()
# tasks = ['a', 'b', 'c']
# for name in tasks:
# p = Process(target=f, args=(name,queue))
# p.start()
#
# for task in tasks:
# print(queue.get())
# print('Fatigue test: ')
# my_test = CalcFatigue(test.obj_dict, test.fat_obj_dict)
# print('Total damage: ',my_test.get_total_damage(int_press=(0,0,0), ext_press=(50000, 60000,0)))
# print('')
# print('Buckling test: ')
#
# my_buc = test.get_structure_calc_object()
#
# #print(my_buc.calculate_buckling_all(design_lat_press=100))
# print(my_buc.calculate_slamming_plate(1000000))
# print(my_buc.calculate_slamming_stiffener(1000000))
# print(my_buc.get_net_effective_plastic_section_modulus())
#my_test.get_total_damage(int_press=(0, 0, 0), ext_press=(0, 40000, 0))
for example in [CalcScantlings(ex.obj_dict)]:#, CalcScantlings(ex.obj_dict2), CalcScantlings(ex.obj_dict_L)]:
my_test = example
# my_test = CalcFatigue(example, ex.fat_obj_dict2)
# my_test.get_total_damage(int_press=(0, 0, 0), ext_press=(0, 40000, 0))
# print('Total damage: ', my_test.get_total_damage(int_press=(0, 0, 0), ext_press=(0, 40000, 0)))
# #print(my_test.get_fatigue_properties())
# pressure = 200
# # print(my_test.buckling_local_stiffener())
# # print('SHEAR CENTER: ',my_test.get_shear_center())
# # print('SECTION MOD: ',my_test.get_section_modulus())
# # print('SECTION MOD FLANGE: ', my_test.get_section_modulus()[0])
# # print('SHEAR AREA: ', my_test.get_shear_area())
# # print('PLASTIC SECTION MOD: ',my_test.get_plasic_section_modulus())
# # print('MOMENT OF INTERTIA: ',my_test.get_moment_of_intertia())
# # print('WEIGHT', my_test.get_weight())
# # print('PROPERTIES', my_test.get_structure_prop())
# # print('CROSS AREA', my_test.get_cross_section_area())
# # print()
# #
# # print('EFFICIENT MOMENT OF INTERTIA: ',my_test.get_moment_of_intertia(efficent_se=my_test.get_plate_efficent_b(
# # design_lat_press=pressure)))
# # print('Se: ',my_test.calculate_buckling_all(design_lat_press=pressure,checked_side='s'))
# # print('Se: ', my_test.calculate_buckling_all(design_lat_press=pressure, checked_side='p'))
# # print('MINIMUM PLATE THICKNESS',my_test.get_dnv_min_thickness(pressure))
# # print('MINIMUM SECTION MOD.', my_test.get_dnv_min_section_modulus(pressure))
# print()
# #my_test.cyl_buckling_long_sft_shell()
#
#
#Structure(ex.obj_dict_cyl_ring)
#Structure(ex.obj_dict_cyl_heavy_ring)
# my_cyl = CylinderAndCurvedPlate(main_dict = ex.shell_main_dict2, shell= Shell(ex.shell_dict),
# long_stf= None,#Structure(ex.obj_dict_cyl_long2),
# ring_stf = Structure(ex.obj_dict_cyl_ring2),
# ring_frame= None)#Structure(ex.obj_dict_cyl_heavy_ring2))
# print(my_cyl.get_utilization_factors())
# Prescriptive buckling UPDATED
Plate = CalcScantlings(ex.obj_dict)
Stiffener = CalcScantlings(ex.obj_dict)
Girder = CalcScantlings(ex.obj_dict_heavy)
PreBuc = AllStructure(Plate = Plate, Stiffener = Stiffener, Girder = Girder,
main_dict=ex.prescriptive_main_dict)
#print(Plate)
print(Stiffener)
print(Stiffener.get_moment_of_intertia_hp())
#print(Girder)
#PreBuc.lat_press = 0.412197
#print(Plate)
# print(Plate)
#print(Stiffener)
# print(Girder)
#print(PreBuc.get_main_properties())
#print(PreBuc.plate_buckling())
if __name__ == '__main__':
    main()
# --- end of file: any_files/calc_structure.py (package: ANYstructure) ---
ex1 = {'Identification': 'Panel 1', 'Length of panel': 3000, 'Stiffener spacing': 700, 'Plate thickness': 15,
'Number of primary stiffeners': 20, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C',
'Stiff. Height': 350, 'Web thick.': 12,'Flange width': 150, 'Flange thick.': 18, 'Tilt angle': 0,
'Number of sec. stiffeners': 0, 'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.33,
'Yield stress plate': 355, 'Yield stress stiffener': 355, 'Axial stress': 80, 'Trans. stress 1': 60,
'Trans. stress 2': 50, 'Shear stress': 10, 'Pressure (fixed)': 0.08, 'In-plane support': 'Int'}
ex2 = {'Identification': 'Panel 2', 'Length of panel': 3500, 'Stiffener spacing': 700, 'Plate thickness': 15,
'Number of primary stiffeners': 20, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C',
'Stiff. Height': 350, 'Web thick.': 12,'Flange width': 150, 'Flange thick.': 18, 'Tilt angle': 0,
'Number of sec. stiffeners': 0, 'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.33,
'Yield stress plate': 355, 'Yield stress stiffener': 355, 'Axial stress': 80, 'Trans. stress 1': 60,
'Trans. stress 2': 50, 'Shear stress': 10, 'Pressure (fixed)': 0.08, 'In-plane support': 'Int'}
ex3 = {'Identification': 'Panel 3', 'Length of panel': 4000, 'Stiffener spacing': 700, 'Plate thickness': 15,
'Number of primary stiffeners': 20, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C',
'Stiff. Height': 350, 'Web thick.': 12,'Flange width': 150, 'Flange thick.': 18, 'Tilt angle': 0,
'Number of sec. stiffeners': 0, 'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.33,
'Yield stress plate': 355, 'Yield stress stiffener': 355, 'Axial stress': 80, 'Trans. stress 1': 60,
'Trans. stress 2': 50, 'Shear stress': 10, 'Pressure (fixed)': 0.08, 'In-plane support': 'Int'}
ex4 = {'Identification': 'Panel 4', 'Length of panel': 3000, 'Stiffener spacing': 700, 'Plate thickness': 15,
'Number of primary stiffeners': 20, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C',
'Stiff. Height': 400, 'Web thick.': 12,'Flange width': 150, 'Flange thick.': 18, 'Tilt angle': 0,
'Number of sec. stiffeners': 0, 'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.33,
'Yield stress plate': 355, 'Yield stress stiffener': 355, 'Axial stress': 80, 'Trans. stress 1': 60,
'Trans. stress 2': 50, 'Shear stress': 10, 'Pressure (fixed)': 0.08, 'In-plane support': 'Int'}
ex5 = {'Identification': 'Panel 5', 'Length of panel': 3500, 'Stiffener spacing': 700, 'Plate thickness': 15,
'Number of primary stiffeners': 20, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C',
'Stiff. Height': 350, 'Web thick.': 12,'Flange width': 150, 'Flange thick.': 25, 'Tilt angle': 0,
'Number of sec. stiffeners': 0, 'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.33,
'Yield stress plate': 355, 'Yield stress stiffener': 355, 'Axial stress': 80, 'Trans. stress 1': 60,
'Trans. stress 2': 50, 'Shear stress': 10, 'Pressure (fixed)': 0.08, 'In-plane support': 'Int'}
ex6 = {'Identification': 'Panel 6', 'Length of panel': 4000, 'Stiffener spacing': 800, 'Plate thickness': 20,
'Number of primary stiffeners': 20, 'Stiffener type (L,T,F)': 'T', 'Stiffener boundary': 'C',
'Stiff. Height': 350, 'Web thick.': 12,'Flange width': 150, 'Flange thick.': 18, 'Tilt angle': 0,
'Number of sec. stiffeners': 0, 'Modulus of elasticity': 2.1e11/1e6, "Poisson's ratio": 0.33,
'Yield stress plate': 355, 'Yield stress stiffener': 355, 'Axial stress': 80, 'Trans. stress 1': 60,
       'Trans. stress 2': 50, 'Shear stress': 10, 'Pressure (fixed)': 0.08, 'In-plane support': 'Int'}
# --- end of file: any_files/example_data_puls.py (package: ANYstructure) ---
import pathlib
import tkinter as tk
from _tkinter import TclError
from tkinter.ttk import Combobox
import os
try:
import any_files.example_data as test
import any_files.helper as hlp
except ModuleNotFoundError:
import ANYstructure.any_files.example_data as test
import ANYstructure.any_files.helper as hlp
from matplotlib.backends.backend_tkagg import (
FigureCanvasTkAgg, NavigationToolbar2Tk)
# Implement the default Matplotlib key bindings.
from matplotlib.backend_bases import key_press_handler
from matplotlib.figure import Figure
class CreateStructureWindow():
'''
This is the tkinter GUI for defining plate/stiffener properties.
'''
def __init__(self, master, app):
super(CreateStructureWindow, self).__init__()
self._frame = master
self._frame.wm_title("Define structure properties")
self._frame.geometry('1800x900')
self._frame.grab_set()
self._root_dir = os.path.dirname(os.path.abspath(__file__))
if __name__ == '__main__':
self._initial_structure_obj = test.get_structure_calc_object()
self._initial_calc_obj = test.get_structure_calc_object()
self._section_list = []
self._section_objects = []
for section in hlp.helper_read_section_file('bulb_anglebar_tbar_flatbar.csv'):
SecObj = Section(section)
self._section_list = hlp.add_new_section(self._section_list, SecObj)
self._section_objects.append(SecObj)
# m = self._ent_section_list.children['menu']
# m.add_command(label=SecObj.__str__(), command=self.section_choose)
self._clicked_button = ["long stf", "ring stf", "ring frame", "flat long stf", 'flat stf', 'flat girder'][0]
else:
self.app = app
self._clicked_button = app._clicked_section_create# if app._line_is_active else None
try:
if self._clicked_button in ['flat stf', "flat long stf"]:
self._initial_structure_obj = self.app._line_to_struc[app._active_line][0].Stiffener
elif self._clicked_button == 'flat girder':
self._initial_structure_obj = self.app._line_to_struc[app._active_line][5].Girder
elif self._clicked_button in ["long stf"]:
self._initial_structure_obj = self.app._line_to_struc[app._active_line][5].LongStfObj
elif self._clicked_button == "ring stf":
self._initial_structure_obj = self.app._line_to_struc[app._active_line][5].RingStfObj
elif self._clicked_button == "ring frame":
self._initial_structure_obj = self.app._line_to_struc[app._active_line][0].RingFrameObj
else:
self._initial_structure_obj = None
except (KeyError, AttributeError) as error:
self._initial_structure_obj = None
self._section_list = [section.__str__() for section in app._sections]
self._section_objects = app._sections
image_dir = os.path.dirname(__file__) + '\\images\\'
self._opt_runned = False
self._opt_resutls = ()
self._draw_scale = 0.5
self._canvas_dim = (500, 450)
ent_w = 10
start_x, start_y, dx, dy = 20, 70, 60, 33
self._canvas_struc = tk.Canvas(self._frame, width=self._canvas_dim[0], height=self._canvas_dim[1],
background='azure', relief='groove', borderwidth=2)
self.structure_types = ['T','L', 'L-bulb','FB']
self._canvas_struc.place(x=10, y=440)
tk.Label(self._frame, text='-- Define structure properties here --', font='Verdana 15 bold').place(x=10, y=10)
#
# ### Adding matplotlib
# fig, ax = run_section_properties()# Figure(figsize=(4, 4), dpi=100)
# t = np.arange(0, 3, .01)
# #fig.add_subplot(111).plot(t, 2 * np.sin(2 * np.pi * t))
#
# canvas = FigureCanvasTkAgg(fig, master=master) # A tk.DrawingArea.
# canvas.draw()
# canvas.get_tk_widget().place(x=start_x+17*dx, y=start_y+dy )
#
# toolbar = NavigationToolbar2Tk(canvas, master)
# toolbar.update()
# canvas.get_tk_widget().place(x=start_x+17*dx, y=start_y+10*dy )
#
# def on_key_press(event):
# print("you pressed {}".format(event.key))
# key_press_handler(event, canvas, toolbar)
#
# canvas.mpl_connect("key_press_event", on_key_press)
self._new_spacing = tk.DoubleVar()
self._new_pl_thk = tk.DoubleVar()
self._new_web_h = tk.DoubleVar()
self._new_web_thk = tk.DoubleVar()
self._new_fl_w = tk.DoubleVar()
self._new_fl_thk = tk.DoubleVar()
self._new_stiffener_type = tk.StringVar()
self._new_stiffener_filter = tk.StringVar()
self._new_stiffener_filter.set('No filter applied')
self._new_girder_length = tk.DoubleVar()
self._new_section = tk.StringVar()
self._ent_section_list = Combobox(self._frame, values = self._section_list, textvariable = self._new_section,
width = 40)
self._ent_section_list.bind("<<ComboboxSelected>>", self.section_choose)
# self._ent_section_list = tk.OptionMenu(self._frame, self._new_section, command=self.section_choose,
# *['',] if self._section_list == [] else self._section_list)
self._ent_structure_options = tk.OptionMenu(self._frame,self._new_stiffener_type,
command=self.option_choose,*self.structure_types)
self._ent_filter_stf = tk.OptionMenu(self._frame,self._new_stiffener_filter,
command=self.regen_option_menu,*['No filter applied','L-bulb', 'L', 'FB', 'T'])
self._ent_spacing = tk.Entry(self._frame, textvariable=self._new_spacing, width=ent_w)
self._ent_pl_thk = tk.Entry(self._frame, textvariable=self._new_pl_thk, width=ent_w)
self._ent_web_h = tk.Entry(self._frame, textvariable=self._new_web_h, width=ent_w)
self._ent_web_thk = tk.Entry(self._frame, textvariable=self._new_web_thk, width=ent_w)
self._ent_fl_w = tk.Entry(self._frame, textvariable=self._new_fl_w, width=ent_w)
self._ent_fl_thk = tk.Entry(self._frame, textvariable=self._new_fl_thk, width=ent_w)
self._ent_girder_length = tk.Entry(self._frame, textvariable=self._new_girder_length, width=ent_w)
tk.Label(self._frame, text='Stiffener type:', font='Verdana 9 bold').place(x=start_x, y=start_y )
tk.Label(self._frame, text='Girder length (Lg)', font='Verdana 9 bold').place(x=start_x+9*dx,
y=start_y + 15 * dy)
tk.Label(self._frame, text='[m]', font='Verdana 9 bold').place(x=start_x + 14 * dx,y=start_y + 15 * dy)
self._ent_girder_length.place(x=start_x + 12 * dx, y=start_y + 15 * dy)
tk.Label(self._frame, text='[mm]', font='Verdana 9 bold').place(x=start_x+3*dx, y=start_y+dy )
tk.Label(self._frame, text='[mm]', font='Verdana 9 bold').place(x=start_x+3*dx, y=start_y + 2*dy)
tk.Label(self._frame, text='[mm]', font='Verdana 9 bold').place(x=start_x +3*dx, y=start_y + 3*dy)
tk.Label(self._frame, text='[mm]', font='Verdana 9 bold').place(x=start_x+3*dx, y=start_y + 4*dy)
tk.Label(self._frame, text='[mm]', font='Verdana 9 bold').place(x=start_x+3*dx, y=start_y + 5*dy)
tk.Label(self._frame, text='[mm]', font='Verdana 9 bold').place(x=start_x+3*dx, y=start_y + 6*dy)
tk.Label(self._frame, text='Existing sections:', font='Verdana 9 bold').place(x=start_x+4*dx, y=start_y + 6*dy)
tk.Label(self._frame, text='filter ->', font='Verdana 9 bold').place(x=start_x + 4 * dx,
y=start_y + 7 * dy)
self._ent_section_list.place(x=start_x+7*dx, y=start_y + 6*dy)
self._ent_filter_stf.place(x=start_x+5*dx, y=start_y + 7*dy)
tk.Button(self._frame, text='Read section list from file', command=self.read_sections, font='Verdana 10 bold',
bg = 'blue', fg = 'yellow').place(x=start_x+12*dx, y=start_y + 6*dy)
tk.Button(self._frame, text='Load built in sections', command=self.read_sections_built_in, font='Verdana 10 bold',
bg = 'azure', fg = 'black').place(x=start_x+12*dx, y=start_y + 7*dy)
# setting default values
init_dim,init_thk = 0.05,0.002
        if self._initial_structure_obj is not None:
self._new_stiffener_type.set(self._initial_structure_obj.get_stiffener_type())
self._new_spacing.set(self._initial_structure_obj.get_s()*1000)
self._new_pl_thk.set(self._initial_structure_obj.get_pl_thk()*1000)
self._new_web_h.set(self._initial_structure_obj.get_web_h()*1000)
self._new_web_thk.set(self._initial_structure_obj.get_web_thk()*1000)
self._new_fl_w.set(self._initial_structure_obj.get_fl_w()*1000)
self._new_fl_thk.set(self._initial_structure_obj.get_fl_thk()*1000)
else:
self._new_spacing.set(0)
self._new_pl_thk.set(0)
self._new_web_h.set(0)
self._new_web_thk.set(0)
self._new_fl_w.set(0)
self._new_fl_thk.set(0)
self._new_girder_length.set(10)
self._ent_structure_options.place(x=start_x + dx * 3, y=start_y)
if self._new_spacing.get() != 0:
tk.Label(self._frame, text='Spacing', font='Verdana 9').place(x=start_x, y=start_y + dy)
self._ent_spacing.place(x=start_x + dx * 2, y=start_y+dy)
if self._new_pl_thk.get() != 0:
tk.Label(self._frame, text='Plate thk.', font='Verdana 9').place(x=start_x, y=start_y + 2 * dy)
self._ent_pl_thk.place(x=start_x + dx * 2, y=start_y+2*dy)
if self._new_web_h.get() != 0:
tk.Label(self._frame, text='Web height', font='Verdana 9').place(x=start_x, y=start_y + 3 * dy)
self._ent_web_h.place(x=start_x + dx * 2, y=start_y+3*dy)
if self._new_web_thk.get() != 0:
tk.Label(self._frame, text='Web thk.', font='Verdana 9').place(x=start_x, y=start_y + 4 * dy)
self._ent_web_thk.place(x=start_x + dx * 2, y=start_y+4*dy)
if self._new_fl_w.get() != 0:
tk.Label(self._frame, text='Flange width', font='Verdana 9').place(x=start_x, y=start_y + 5 * dy)
self._ent_fl_w.place(x=start_x + dx * 2, y=start_y+5*dy)
if self._new_fl_thk.get() != 0:
tk.Label(self._frame, text='Flange thk.', font='Verdana 9').place(x=start_x, y=start_y + 6 * dy)
self._ent_fl_thk.place(x=start_x + dx * 2, y=start_y+6*dy)
self._new_spacing.trace('w',self.draw_trace)
self._new_pl_thk.trace('w',self.draw_trace)
self._new_web_h.trace('w',self.draw_trace)
self._new_web_thk.trace('w',self.draw_trace)
self._new_fl_w.trace('w',self.draw_trace)
self._new_fl_thk.trace('w',self.draw_trace)
try:
img_file_name = 'img_stiffened_plate_panel.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = os.path.dirname(os.path.abspath(__file__)) + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
label = tk.Label(self._frame, image=photo)
label.image = photo # keep a reference!
label.place(x=550, y=610)
except TclError:
pass
try:
img_file_name = 'img_T_L_FB.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = os.path.dirname(os.path.abspath(__file__)) + '/images/' + img_file_name
photo_T_L_FB = tk.PhotoImage(file=file_path)
label = tk.Label(self._frame, image=photo_T_L_FB )
label.image = photo_T_L_FB # keep a reference!
label.place(x=270, y=50)
except TclError:
pass
# Close and save depending on input
# "long stf", "ring stf", "ring frame", "flat long stf"
if self._clicked_button is not None:
self.close_and_save = tk.Button(self._frame, text='Click to return section data to ' + self._clicked_button,
command=self.save_and_close, bg='green',
font='Verdana 10 bold', fg='yellow')
self.close_and_save.place(x=start_x + dx * 9, y=start_y + dy * 12)
self.draw_properties()
def regen_option_menu(self, event = None):
self._ent_section_list.destroy()
sections = []
if self._section_list == []:
sections = ['',]
elif self._new_stiffener_filter.get() == 'No filter applied':
sections = self._section_list
else:
for sec_obj in self._section_objects:
if sec_obj.stf_type == self._new_stiffener_filter.get():
sections.append(sec_obj.__str__())
start_x, start_y, dx, dy = 20, 70, 60, 33
# self._ent_section_list = tk.OptionMenu(self._frame, self._new_section, command=self.section_choose,
# *sections)
self._ent_section_list = Combobox(self._frame, values=sections, textvariable=self._new_section, width = 40)
self._ent_section_list.bind("<<ComboboxSelected>>", self.section_choose)
self._ent_section_list.place(x=start_x + 7 * dx, y=start_y + 6 * dy)
pass
def option_choose(self, event):
'''
Action when the option menu is changed.
:param event:
:return:
'''
start_x, start_y, dx, dy = 20, 70, 50, 33
tk.Label(self._frame, text='Spacing', font='Verdana 9').place(x=start_x, y=start_y + dy)
self._ent_spacing.place(x=start_x + dx * 2, y=start_y+dy)
tk.Label(self._frame, text='Plate thk.', font='Verdana 9').place(x=start_x, y=start_y + 2 * dy)
self._ent_pl_thk.place(x=start_x + dx * 2, y=start_y+2*dy)
tk.Label(self._frame, text='Web height', font='Verdana 9').place(x=start_x, y=start_y + 3 * dy)
self._ent_web_h.place(x=start_x + dx * 2, y=start_y+3*dy)
tk.Label(self._frame, text='Web thk.', font='Verdana 9').place(x=start_x, y=start_y + 4 * dy)
self._ent_web_thk.place(x=start_x + dx * 2, y=start_y+4*dy)
if self._new_stiffener_type.get()!='FB':
tk.Label(self._frame, text='Flange width', font='Verdana 9').place(x=start_x, y=start_y + 5 * dy)
self._ent_fl_w.place(x=start_x + dx * 2, y=start_y+5*dy)
else: self._ent_fl_w.place_forget()
if self._new_stiffener_type.get()!='FB':
tk.Label(self._frame, text='Flange thk.', font='Verdana 9').place(x=start_x, y=start_y + 6 * dy)
self._ent_fl_thk.place(x=start_x + dx * 2, y=start_y+6*dy)
else: self._ent_fl_thk.place_forget()
if self._new_stiffener_type.get()=='FB':
self._new_fl_w.set(0)
self._new_fl_thk.set(0)
self.draw_properties()
def checkered(self, line_distance):
'''
Grid lines in the properties canvas.
:param line_distance:
:return:
'''
# vertical lines at an interval of "line_distance" pixel
for x in range(line_distance, self._canvas_dim[0], line_distance):
self._canvas_struc.create_line(x, 0, x, self._canvas_dim[0], fill="grey", stipple='gray50')
# horizontal lines at an interval of "line_distance" pixel
for y in range(line_distance, self._canvas_dim[1], line_distance):
self._canvas_struc.create_line(0, y, self._canvas_dim[0], y, fill="grey", stipple='gray50')
def draw_properties(self):
'''
Drawing properties in the canvas.
:return:
'''
self._canvas_struc.delete('all')
self.checkered(10)
ctr_x = self._canvas_dim[0] / 2
ctr_y = self._canvas_dim[1] / 2 + 200
m = self._draw_scale
init_color, init_stipple = 'blue', 'gray50'
try: spacing = self._new_spacing.get()
except TclError: spacing = 0
try: pl_thk = self._new_pl_thk.get()
except TclError: pl_thk = 0
try: web_h = self._new_web_h.get()
except TclError: web_h = 0
try: web_thk = self._new_web_thk.get()
except TclError: web_thk = 0
try: fl_w = self._new_fl_w.get()
except TclError: fl_w = 0
try: fl_thk = self._new_fl_thk.get()
except TclError: fl_thk = 0
self._canvas_struc.create_rectangle(0, 0, self._canvas_dim[0] + 10, 70, fill='white')
self._canvas_struc.create_text(250, 15, text='Plate: ' + str(spacing ) + 'x' +
str(pl_thk ),font='Verdana 10 bold',fill='black')
self._canvas_struc.create_rectangle(ctr_x - m * spacing / 2, ctr_y,ctr_x + m * spacing / 2,
ctr_y - m * pl_thk, fill='black', stipple=init_stipple)
self._canvas_struc.create_text(250, 35, text='Web: ' + str(web_h ) + 'x'+ str(web_thk )
,font='Verdana 10 bold',fill='blue')
self._canvas_struc.create_rectangle(ctr_x - m * web_thk / 2,ctr_y - m * pl_thk,ctr_x + m * web_thk / 2,
ctr_y - m * (web_h+ pl_thk), fill='blue', stipple=init_stipple)
self._canvas_struc.create_text(250, 55, text='Flange: '+ str(fl_w ) + 'x'+ str(fl_thk ),
font='Verdana 10 bold',fill='red')
if self._new_stiffener_type.get() in ['L', 'L-bulb']:
self._canvas_struc.create_rectangle(ctr_x - m * web_thk / 2, ctr_y- m * (pl_thk + web_h),ctr_x + m * fl_w,
ctr_y - m * (pl_thk + web_h + fl_thk),fill='red', stipple=init_stipple)
else:
self._canvas_struc.create_rectangle(ctr_x - m * fl_w / 2, ctr_y- m * (pl_thk + web_h),ctr_x + m * fl_w / 2,
ctr_y - m * (pl_thk + web_h + fl_thk),fill='red', stipple=init_stipple)
def draw_trace(self,*args):
'''
        Updating the drawing when values are entered.
:param event:
:return:
'''
self.draw_properties()
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
self.app.on_close_structure_window([float(num) for num in [self._new_spacing.get(),self._new_pl_thk.get(),
self._new_web_h.get(),self._new_web_thk.get(),
self._new_fl_w.get(),self._new_fl_thk.get()]] +
[self._new_stiffener_type.get(), self._clicked_button])
self._frame.destroy()
def section_choose(self, event = None):
''' Choosing a section. '''
#chosen_section = self._new_section.get()
chosen_section = event.widget.get()
for section in self._section_objects:
if chosen_section == section.__str__():
self._new_web_h.set(section.stf_web_height*1000)
self._new_web_thk.set(section.stf_web_thk*1000)
self._new_fl_w.set(section.stf_flange_width*1000)
self._new_fl_thk.set(section.stf_flange_thk*1000)
self._new_stiffener_type.set(section.stf_type)
self.option_choose(None)
def read_sections(self):
'''
        Read a section list from a CSV file selected by the user.
'''
from tkinter import filedialog
import any_files.helper as hlp
from pathlib import Path
file = filedialog.askopenfile('r')
file = Path(file.name)
#m = self._ent_section_list.children['menu']
for section in hlp.helper_read_section_file(file.name):
SecObj = Section(section)
self._section_list = hlp.add_new_section(self._section_list, SecObj)
self._section_objects.append(SecObj)
#m.add_command(label=SecObj.__str__(), command=self.section_choose)
def read_sections_built_in(self):
'''
        Read the built-in section list (bulb_anglebar_tbar_flatbar.csv).
'''
import any_files.helper as hlp
        if pathlib.Path('bulb_anglebar_tbar_flatbar.csv').exists():
            libfile = 'bulb_anglebar_tbar_flatbar.csv'
        else:
            libfile = self._root_dir + '/' + 'bulb_anglebar_tbar_flatbar.csv'
for section in hlp.helper_read_section_file(libfile):
SecObj = Section(section)
self._section_list = hlp.add_new_section(self._section_list, SecObj)
self._section_objects.append(SecObj)
#m.add_command(label=SecObj.__str__(), command=self.section_choose)
self.regen_option_menu()
class Section:
'''
Creates a section property.
'stf_type': [self._new_stf_type.get(), ''],
'stf_web_height': [self._new_stf_web_h.get()/1000, 'm'],
'stf_web_thk': [self._new_sft_web_t.get()/1000, 'm'],
'stf_flange_width': [self._new_stf_fl_w.get()/1000, 'm'],
'stf_flange_thk': [self._new_stf_fl_t.get()/1000, 'm'],
'''
def __init__(self, input_dict):
super(Section, self).__init__()
self._stf_type = input_dict['stf_type'] if type(input_dict['stf_type']) != list \
else input_dict['stf_type'][0]
self._stf_web_height = input_dict['stf_web_height']if type(input_dict['stf_web_height']) != list \
else input_dict['stf_web_height'][0]
self._stf_web_thk = input_dict['stf_web_thk']if type(input_dict['stf_web_thk']) != list \
else input_dict['stf_web_thk'][0]
self._stf_flange_width = input_dict['stf_flange_width']if type(input_dict['stf_flange_width']) != list \
else input_dict['stf_flange_width'][0]
self._stf_flange_thk = input_dict['stf_flange_thk']if type(input_dict['stf_flange_thk']) != list \
else input_dict['stf_flange_thk'][0]
def __str__(self):
''' Returning a string. '''
base_name = self.stf_type+ '_' + str(round(self.stf_web_height*1000, 0)) + 'x' + \
str(round(self.stf_web_thk*1000, 0))
if self._stf_type == 'FB':
ret_str = base_name
elif self._stf_type in ['L-bulb', 'bulb', 'hp']:
ret_str = 'Bulb'+str(int(self.stf_web_height*1000 + self.stf_flange_thk*1000))+'x'+\
str(round(self.stf_web_thk*1000, 0))+ '__' +str(round(self.stf_web_height*1000, 0)) + 'x' + \
str(round(self.stf_web_thk*1000, 0))+ str(round(self.stf_flange_width*1000, 0)) + 'x' + \
str(round(self.stf_flange_thk*1000, 0))
else:
ret_str = base_name + '__' + str(round(self.stf_flange_width*1000, 0)) + 'x' + \
str(round(self.stf_flange_thk*1000, 0))
ret_str = ret_str.replace('.', '_')
return ret_str
@property
def stf_type(self):
return self._stf_type
@stf_type.setter
def stf_type(self, value):
self._stf_type = value
@property
def stf_web_height(self):
return self._stf_web_height
@stf_web_height.setter
def stf_web_height(self, value):
self._stf_web_height = value
@property
def stf_web_thk(self):
return self._stf_web_thk
@stf_web_thk.setter
def stf_web_thk(self, value):
self._stf_web_thk = value
@property
def stf_flange_width(self):
return self._stf_flange_width
@stf_flange_width.setter
def stf_flange_width(self, value):
self._stf_flange_width = value
@property
def stf_flange_thk(self):
return self._stf_flange_thk
@stf_flange_thk.setter
def stf_flange_thk(self, value):
self._stf_flange_thk = value
def return_puls_input(self):
'''
Returns as input good for PULS
:return:
'''
return {'Stiffener type (L,T,F)': self.stf_type, 'Stiffener boundary': 'C',
'Stiff. Height': self.stf_web_height*1000,
'Web thick.': self.stf_web_thk*1000, 'Flange width': self.stf_flange_width*1000,
'Flange thick.': self.stf_flange_thk*1000}
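    # Minimal usage sketch (illustrative values, same pattern as the commented-out example below):
    #   sec = Section({'stf_type': 'T', 'stf_web_height': 0.35, 'stf_web_thk': 0.012,
    #                  'stf_flange_width': 0.15, 'stf_flange_thk': 0.018})
    #   str(sec)                  # e.g. 'T_350_0x12_0__150_0x18_0'
    #   sec.return_puls_input()   # dict with dimensions in mm for the PULS sheet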
# def run_section_properties(pl_s = 0.75, pl_t = 0.015, hw = 0.4, tw = 0.018, bf = 0.15, tf = 0.02):
# import sectionproperties.pre.sections as sections
# from sectionproperties.analysis.cross_section import CrossSection
# from matplotlib import pyplot as plt
#
# # create a 50 diameter circle discretised by 64 points
# geometry = sections.MonoISection(
# d=(pl_t+hw+tf)*1000, b_t=bf*1000, b_b=pl_s*1000, t_ft=tf*1000, t_fb=pl_t*1000, t_w=tw*1000, r=8, n_r=16
# )
# mesh = geometry.create_mesh(mesh_sizes=[3.0])
# section = CrossSection(geometry, mesh) # create a CrossSection object
# mesh_nodes = section.mesh_nodes
# mesh_elements = section.mesh_elements
# # plot the mesh
# (fig, ax) = plt.subplots(figsize=(4, 4), dpi=100)
# ax.triplot(mesh_nodes[:, 0], mesh_nodes[:, 1], mesh_elements[:, 0:3], lw=0.5)
# # #section.display_mesh_info() # display the mesh information
# # ax = section.plot_mesh(pause=True) # plot the generated mesh
# #
# # # perform a geometric, warping and plastic analysis, displaying the time info
# # section.calculate_geometric_properties(time_info=True)
# # section.calculate_warping_properties(time_info=True)
# # section.calculate_plastic_properties(time_info=True)
# #
# # # print the results to the terminal
# # section.display_results()
# #
# # # get the second moments of area and the torsion constant
# # (ixx_c, iyy_c, ixy_c) = section.get_ic()
# # j = section.get_j()
# #
# # # print the sum of the second moments of area and the torsion constant
# # print("Ixx + Iyy = {0:.3f}".format(ixx_c + iyy_c))
# # print("J = {0:.3f}".format(j))
# return fig, ax
#
#
if __name__ == '__main__':
# sec1 = Section({'stf_type': 'T', 'stf_web_height': 0.35, 'stf_web_thk': 0.02, 'stf_flange_width': 0.15,
# 'stf_flange_thk': 0.015})
#
# sec_list = [sec1, Section({'stf_type': 'FB', 'stf_web_height': 0.35, 'stf_web_thk': 0.02, 'stf_flange_width': 0,
# 'stf_flange_thk': 0}), Section({'stf_type': 'T', 'stf_web_height': 0.4, 'stf_web_thk': 0.02,
# 'stf_flange_width': 0.15, 'stf_flange_thk': 0.02})]
#
# hlp.add_new_section(sec_list, sec1)
# run_section_properties()
root = tk.Tk()
my_app = CreateStructureWindow(root, app=None)
root.mainloop()
# --- end of any_files/pl_stf_window.py ---
import numpy as np
from scipy import ndimage
class Grid:
"""
Implementation of 2D grid of cells
Includes boundary handling
"""
def __init__(self, grid_height, grid_width):
"""
Initializes grid to be empty; takes height and width of grid as parameters
Indexed by rows (left to right), then by columns (top to bottom)
"""
#print('Grid initsialised with', grid_height, grid_width)
self._grid_height = grid_height
self._grid_width = grid_width
self._cells = np.zeros((self._grid_height,self._grid_width))
self.empty, self.full, self.barrier, self.corner = 0, 1, -1, -2
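# Cell value conventions: 0 = empty, 1 = full, -1 = barrier and -2 = corner.
# Other positive values are used elsewhere in the class as line numbers / compartment labels.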
self._geo_info = {'points': None, 'lines': None}
self._compressed_grid = None
self._bfs_search_data = None
@property
def cells(self):
return self._cells
@cells.setter
def cells(self, val):
self._cells = val
@property
def bfs_search_data(self):
return self._bfs_search_data
@bfs_search_data.setter
def bfs_search_data(self, val):
self._bfs_search_data = val
def __str__(self):
"""
Return multi-line string representation for grid
"""
ans = ""
for row in range(self._grid_height):
ans += str(self._cells[row])
ans += "\n"
return ans
def make_empty_grid(self):
'''
Return a new grid (numpy array) of all zeros.
:return:
'''
return np.zeros((self._grid_height,self._grid_width))
def get_array(self):
''' Returning the numpy array '''
return self._cells
def provide_line_info(self, lines, points):
''' Provide line and point information to the grid.
The geometric information is a dictionary
{ line_dict: line_dict,
point_dict: point_dict }
'''
self._geo_info['lines'] = lines
self._geo_info['points'] = points
def get_grid_height(self):
"""
Return the height of the grid for use in the GUI
"""
return self._grid_height
def get_grid_width(self):
"""
Return the width of the grid for use in the GUI
"""
return self._grid_width
def get_matrix(self):
"""
Return the complete matrix in numpy list form.
"""
return self._cells
def get_highest_number_in_grid(self):
'''
Returns the highest number in the grid.
:return:
'''
return np.amax(self._cells)
def clear(self):
"""
Clears grid to be empty
"""
self._cells = np.zeros((self._grid_height,self._grid_width))
def set_empty(self, row, col):
"""
Set cell with index (row, col) to be empty
"""
self._cells[row][col] = self.empty
def set_full(self, row, col):
"""
Set cell with index (row, col) to be full
"""
self._cells[row][col] = self.full
def set_value(self, row, col, value):
"""
Set cell with index (row, col) to be a specified integer
"""
self._cells[row][col] = value
def set_barrier(self, row, col, line_number: int = None):
"""
Set cell with index (row, col) to be a barrier (or to a given line number)
"""
if line_number is None:
self._cells[row][col] = self.barrier
else:
self._cells[row][col] = line_number
def set_number_to_cell(self, row, col, number):
'''
Setting an arbitrary number to a cell.
'''
self._cells[row][col] = number
def is_empty(self, row, col):
"""
Checks whether cell with index (row, col) is empty
"""
return self._cells[row][col] == self.empty
def is_full(self, row, col):
"""
Checks whether cell with index (row, col) is full
"""
return self._cells[row][col] == self.full
def is_barrier(self, row, col):
"""
Checks whether cell with index (row, col) is a barrier
"""
return self._cells[row][col] == self.barrier
def is_corner(self,point):
'''
Identifying corners.
:param point:
:return:
'''
return [self.get_value(item[0],item[1]) for item in
self.eight_neighbors(point[0],point[1])].count(self.barrier) > 4
def four_neighbors_extend_1(self, row, col):
"""
Returns horiz/vert cells two steps away from cell (row, col)
"""
ans = []
if row > 0:
ans.append((row - 2, col))
if row < self._grid_height - 1:
ans.append((row + 2, col))
if col > 0:
ans.append((row, col - 2))
if col < self._grid_width - 1:
ans.append((row, col + 2))
return ans
def four_neighbors(self, row, col):
"""
Returns horiz/vert neighbors of cell (row, col)
"""
ans = []
if row > 0:
ans.append((row - 1, col))
if row < self._grid_height - 1:
ans.append((row + 1, col))
if col > 0:
ans.append((row, col - 1))
if col < self._grid_width - 1:
ans.append((row, col + 1))
return ans
def eight_neighbors(self, row, col):
"""
Returns horiz/vert neighbors of cell (row, col) as well as
diagonal neighbors
"""
ans = []
if row > 0:
ans.append((row - 1, col))
if row < self._grid_height - 1:
ans.append((row + 1, col))
if col > 0:
ans.append((row, col - 1))
if col < self._grid_width - 1:
ans.append((row, col + 1))
if (row > 0) and (col > 0):
ans.append((row - 1, col - 1))
if (row > 0) and (col < self._grid_width - 1):
ans.append((row - 1, col + 1))
if (row < self._grid_height - 1) and (col > 0):
ans.append((row + 1, col - 1))
if (row < self._grid_height - 1) and (col < self._grid_width - 1):
ans.append((row + 1, col + 1))
return ans
def get_index(self, point, cell_size):
"""
Takes point in screen coordinates and returns index of
containing cell
"""
return (point[1] / cell_size, point[0] / cell_size)
def get_value(self, row, col):
# print('requested ROW COL: ', row, col)
# print('CURRENT CELL LEN: ',len(self._cells), 'Shape is ',self._cells.shape)
#print(self._cells[row])
return self._cells[row][col]
def get_points_along_line(self,start, end):
"""Bresenham's Line Algorithm
Produces a list of tuples from start and end
points1 = get_line((0, 0), (3, 4))
points2 = get_line((3, 4), (0, 0))
assert(set(points1) == set(points2))
print points1
[(0, 0), (1, 1), (1, 2), (2, 3), (3, 4)]
[(3, 4), (2, 3), (1, 2), (1, 1), (0, 0)]
"""
# Setup initial conditions
x1 = int(start[0])
y1 = int(start[1])
x2 = int(end[0])
y2 = int(end[1])
dx = x2 - x1
dy = y2 - y1
# Determine how steep the line is
is_steep = abs(dy) > abs(dx)
# Rotate line
if is_steep:
x1, y1 = y1, x1
x2, y2 = y2, x2
# Swap start and end points if necessary and store swap state
swapped = False
if x1 > x2:
x1, x2 = x2, x1
y1, y2 = y2, y1
swapped = True
# Recalculate differentials
dx = x2 - x1
dy = y2 - y1
# Calculate error
error = int(dx / 2.0)
ystep = 1 if y1 < y2 else -1
# Iterate over bounding box generating points between start and end
y = y1
points = []
for x in range(x1, x2 + 1):
coord = (y, x) if is_steep else (x, y)
points.append(coord)
error -= abs(dy)
if error < 0:
y += ystep
error += dx
# Reverse the list if the coordinates were swapped
if swapped:
points.reverse()
return points
def get_mid_point(self, cell1, cell2):
'''
Get the point that is in the middle between two cells.
:param cell1:
:param cell2:
:return:
'''
idx = int(round(len(self.get_points_along_line(cell1,cell2))/2,0))
return self.get_points_along_line(cell1,cell2)[idx]
def get_adjacent_values(self,cell):
'''
Find the labels in the grid adjacent to the specified cell.
:param cell:
:return:
'''
# return tuple(set([int(self.get_value(neighbor[0], neighbor[1]))
# for neighbor in self.four_neighbors_extend_1(cell[0], cell[1])]))
return tuple(set([int(self.get_value(neighbor[0], neighbor[1]))
for neighbor in self.four_neighbors(cell[0], cell[1])]))
def get_adjacent_values_duplicates(self,cell):
'''
Find the labels in the grid adjacent to the specified cell.
:param cell:
:return:
'''
return_tuple = tuple(list([int(self.get_value(neighbor[0], neighbor[1]))
for neighbor in self.four_neighbors(cell[0], cell[1])]))
# return_tuple = tuple(list([int(self.get_value(neighbor[0], neighbor[1]))
# for neighbor in self.four_neighbors_extend_1(cell[0], cell[1])]))
if len(tuple(set(return_tuple))) > 1:
return set(return_tuple)
else:
return (tuple(set(return_tuple))[0],tuple(set(return_tuple))[0])
def get_highest_cell(self, value):
'''
Get the cell closest to (0,0) with a given value.
:param value:
:return:
'''
highest = (self.get_grid_height(),0)
for row in range(self.get_grid_height()):
for col in range(self.get_grid_width()):
if self.get_value(row,col) == value and row < highest[0]:
highest = (row,col)
return highest
def get_lowest_cell(self,value):
'''
Get the cell closest to (height,0) with a given value.
:param value:
:return:
'''
lowest = (0,0)
for row in range(self.get_grid_height()):
for col in range(self.get_grid_width()):
if self.get_value(row,col) == value and row > lowest[0]:
lowest = (row,col)
return lowest
def get_number_of_cells_with_value(self,value):
'''
Get the number of cells with a certain value.
:param value:
:return:
'''
counter = 0
for row in range(self.get_grid_height()):
for col in range(self.get_grid_width()):
if self.get_value(row,col) == value:
counter += 1
return counter
def import_grid(self,grid):
'''
Import a grid to replace created grid. Converting to numpy array.
:param grid:
:return:
'''
if np.array(grid, dtype=object).shape[1] == 2:
self._cells = self.rebuild_compressed(grid)
else:
self.cells = np.array(grid) # old large save files
def export_grid(self):
'''
Converting from array to list of list. Exporting the grid.
:return:
'''
return self.export_compressed_grid()
def export_compressed_grid(self):
'''
Converting from array to list of list. Exporting the grid for saving.
:return:
'''
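# Two-pass run-length encoding: each row is first stored as [value, run_length] pairs,
# then identical consecutive rows are collapsed into [row_encoding, repeat_count] entries,
# e.g. [[[0.0, 10], [-1.0, 2], [0.0, 8]], 35] means 35 identical rows of 10 zeros,
# 2 barrier cells and 8 zeros (illustrative example).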
save_list = list()
# Compressing horizontally
for row in self._cells:
this_counter, this_number, save_row = 1, row[0], list()
for col_idx in range(len(row)-1):
last = col_idx == len(row)-2
if row[col_idx] == row[col_idx+1] and not last:
this_counter += 1
elif row[col_idx] != row[col_idx+1] and not last:
save_row.append([this_number, this_counter])
this_number = row[col_idx+1]
this_counter = 1
elif last:
save_row.append([this_number, this_counter+1])
save_list.append(save_row)
# Compressing vertically
this_counter, this_number, save_vertical = 0, save_list[0], list()
for row_idx in range(len(save_list) - 1):
last = row_idx == len(save_list) - 2
if save_list[row_idx] == save_list[row_idx +1] and not last:
this_counter += 1
elif save_list[row_idx] != save_list[row_idx +1] and not last:
save_vertical.append([this_number, this_counter])
this_number = save_list[row_idx +1]
this_counter = 1
elif last:
save_vertical.append([this_number, this_counter])
if save_list[row_idx+1] != save_list[row_idx]:
save_vertical.append([save_list[row_idx+1], 1])
if save_vertical[0][-1] != self.cells.shape[0]:
save_vertical[0][-1] += 1
# assert this_counter == self.cells.shape[0], 'Error. Shape of compressed grid is not equal to input grid. ' \
# 'The counter has skipped a step. The saved is '\
# +str(save_vertical) + ' and the shape is ' + \
# str(self.cells.shape)
return save_vertical
def rebuild_compressed(self, compressed_grid = None):
'''
Rebuilding a compressed grid made by 'export_compressed_grid(self)'
:return:
'''
compressed_grid = compressed_grid if compressed_grid is not None else self._compressed_grid
vertical_expansion_list = []
# Expand vertically
for row_count, row in enumerate(compressed_grid):
values = row[0]
value_count = row[1]
for dummy_i in range(value_count):
vertical_expansion_list.append(values)
# Expand horizontally
expanded_list = [list() for dummy_i in range(len(vertical_expansion_list))]
for row_count, row in enumerate(vertical_expansion_list):
for values in row:
value = values[0]
value_count = values[1]
for dummy_i in range(value_count):
expanded_list[row_count].append(value)
#print('Shape of rebuilt grid is',np.array(expanded_list).shape)
return np.array(expanded_list)
def get_center_of_matrix(self, height_limit: float = None, scale: float = 10):
'''
Return the centre of mass of the compartment cells (grid values other than 0 and 1),
optionally restricted to the lowest height_limit*scale rows of the grid.
The result is returned as (height from grid bottom, width), both divided by scale.
'''
import copy
calc_grid = copy.deepcopy(self.get_array())
all_compartments = np.unique(calc_grid)
calc_grid[calc_grid == 1] = 0
#set all values to a value
for val in all_compartments:
if val != 0:
calc_grid[calc_grid == val] = 1
if height_limit is not None:
calc_grid = calc_grid[int(self._grid_height-height_limit*scale):, :]
center_of_mass = ndimage.measurements.center_of_mass(calc_grid)
center_of_mass = ((calc_grid.shape[0]-center_of_mass[0])/scale, center_of_mass[1]/scale)
return center_of_mass
if __name__ == '__main__':
import example_data as ex
import grid_window as grd
from matplotlib import pyplot as plt
lines = ex.line_dict
points = ex.point_dict
canvas_dim = [1000,720]
canvas_origo = (50,670)
empty_grid = np.zeros((720, 1000))
my_grid = grd.CreateGridWindow(ex.get_grid_no_inp(empty_grid=False), canvas_dim, {}, canvas_origo)
print('before search', my_grid.grid.get_array().shape)
search_return = my_grid.search_bfs(animate = True)
print('after search', my_grid.grid.get_array().shape)
my_grid.grid.get_center_of_matrix(height_limit=5)
# --- end of any_files/make_grid_numpy.py ---
import tkinter as tk
try:
import any_files.example_data as test
import any_files.SN_curve_parameters as sn
except ModuleNotFoundError:
import ANYstructure.any_files.example_data as test
import ANYstructure.any_files.SN_curve_parameters as sn
class CreateFatigueWindow():
'''
This class initiates the GUI used to define fatigue properties for the selected structure.
'''
def __init__(self, master, app=None):
super(CreateFatigueWindow, self).__init__()
if __name__ == '__main__':
self._initial_structure_obj = test.get_structure_object()
self.load_objects = [test.get_loa_fls_load(),test.get_loa_uls_load(),
test.get_bal_fls_load(), test.get_bal_uls_load()]
self.active_line = 'line1'
points = test.line_dict['line1']
coords = (test.point_dict['point'+str(points[0])], test.point_dict['point'+str(points[1])])
self.pressure_coords = self.get_pressure_point_coord_from_two_points(coords[0],coords[1])
self.comp_objects = [test.get_tank_object()]
self._initial_fatigue_obj = test.get_fatigue_object()
else:
if app._line_to_struc[app._active_line][0] is None:
return
elif app._line_to_struc[app._active_line][0].Stiffener is None:
return
self.app = app
self.active_line = app._active_line
points = app._line_dict[self.active_line]
coords = (app._point_dict['point'+str(points[0])], app._point_dict['point'+str(points[1])])
self.pressure_coords = self.get_pressure_point_coord_from_two_points(coords[0], coords[1])
self._initial_structure_obj = app._line_to_struc[app._active_line][0].Stiffener
self.load_objects = app._line_to_struc[app._active_line][3]
self.comp_objects = [app._tank_dict['comp'+str(comp_i)] for comp_i in
app.get_compartments_for_line(app._active_line)]
self._initial_fatigue_obj = app._line_to_struc[self.active_line][2]
self._frame = master
self._frame.wm_title("Specify fatigue properties here.")
self._frame.geometry('1300x810')
self._frame.grab_set()
tk.Label(self._frame, text='-- Fatigue calculation for plates according to DNVGL-RP-C203, '
'Section 5 Simplified fatigue analysis --',
font='Verdana 15 bold').place(x=10, y=10)
ent_w = 10
self.new_sn_curve = tk.StringVar()
self.new_k_factor = tk.DoubleVar()
self.new_no_of_cycles = tk.DoubleVar()
self.new_design_life = tk.DoubleVar()
self.new_dff = tk.DoubleVar()
self.new_weibull_loa = tk.DoubleVar()
self.new_weibull_bal = tk.DoubleVar()
self.new_weibull_prt = tk.DoubleVar()
self.new_period_loa = tk.DoubleVar()
self.new_period_bal = tk.DoubleVar()
self.new_period_prt = tk.DoubleVar()
self.new_corr_loc_loa = tk.DoubleVar()
self.new_corr_loc_bal = tk.DoubleVar()
self.new_corr_loc_prt = tk.DoubleVar()
self.new_fraction_loa = tk.DoubleVar()
self.new_fraction_bal = tk.DoubleVar()
self.new_fraction_prt = tk.DoubleVar()
self.new_az_loa = tk.DoubleVar()
self.new_az_bal = tk.DoubleVar()
self.new_az_prt = tk.DoubleVar()
sn_curves = sn.get_all_curves()
self.ent_sn_curve = tk.OptionMenu(self._frame, self.new_sn_curve, command=self.change_sn_curve,*sn_curves)
self.ent_dff = tk.Entry(self._frame, textvariable=self.new_dff, width=ent_w)
self.ent_k_factor = tk.Entry(self._frame, textvariable=self.new_k_factor, width=ent_w)
self.ent_no_of_cycles = tk.Entry(self._frame, textvariable=self.new_no_of_cycles, width=ent_w)
self.ent_new_design_life = tk.Entry(self._frame, textvariable=self.new_design_life, width=ent_w)
self.ent_weibull_loa = tk.Entry(self._frame, textvariable=self.new_weibull_loa, width=ent_w)
self.ent_weibull_bal = tk.Entry(self._frame, textvariable=self.new_weibull_bal, width=ent_w)
self.ent_weibull_prt = tk.Entry(self._frame, textvariable=self.new_weibull_prt, width=ent_w)
self.ent_period_loa = tk.Entry(self._frame, textvariable=self.new_period_loa, width=ent_w)
self.ent_period_bal = tk.Entry(self._frame, textvariable=self.new_period_bal, width=ent_w)
self.ent_period_prt = tk.Entry(self._frame, textvariable=self.new_period_prt, width=ent_w)
self.ent_corr_loc_loa = tk.Entry(self._frame, textvariable=self.new_corr_loc_loa, width=ent_w)
self.ent_corr_loc_bal = tk.Entry(self._frame, textvariable=self.new_corr_loc_bal, width=ent_w)
self.ent_corr_loc_prt = tk.Entry(self._frame, textvariable=self.new_corr_loc_prt, width=ent_w)
self.ent_fraction_loa = tk.Entry(self._frame, textvariable=self.new_fraction_loa, width=ent_w)
self.ent_fraction_bal = tk.Entry(self._frame, textvariable=self.new_fraction_bal, width=ent_w)
self.ent_fraction_prt = tk.Entry(self._frame, textvariable=self.new_fraction_prt, width=ent_w)
self.ent_acc_loa = tk.Entry(self._frame, textvariable=self.new_az_loa, width=ent_w)
self.ent_acc_bal = tk.Entry(self._frame, textvariable=self.new_az_bal, width=ent_w)
self.ent_acc_prt = tk.Entry(self._frame, textvariable=self.new_az_prt, width=ent_w)
start_x, start_y, dx, dy = 20, 100, 150, 35
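# Layout helpers: (start_x, start_y) anchors the widget grid and (dx, dy) are the
# horizontal/vertical spacings (in pixels) used in the place() calls below.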
loaded_exist = False
ballast_exist = False
part_exist = False
fls_exist = (loaded_exist, ballast_exist, part_exist)
# Frames to hide loaded, ballast or part
loa_fr = tk.Frame(self._frame, width=100, height=170, bg="gray25", colormap="new")
bal_fr = tk.Frame(self._frame, width=100, height=170, bg="gray25", colormap="new")
prt_fr = tk.Frame(self._frame, width=100, height=170, bg="gray25", colormap="new")
for load in self.load_objects:
if load != None:
if load.get_limit_state() == 'FLS':
if load.get_load_condition() == 'loaded':
loaded_exist = True
elif load.get_load_condition() == 'ballast':
ballast_exist = True
elif load.get_load_condition() == 'part':
part_exist = True
else:
pass
fls_exist = (loaded_exist, ballast_exist, part_exist)
if self._initial_fatigue_obj == None:
self.new_sn_curve.set('Ec')
self.new_k_factor.set(1)
self.new_no_of_cycles.set(10000)
self.new_design_life.set(20)
self.new_dff.set(2)
if any(fls_exist):
if loaded_exist:
self.new_fraction_loa.set(1 / fls_exist.count(True))
self.new_weibull_loa.set(0.8)
self.new_period_loa.set(8)
self.new_corr_loc_loa.set(0.5)
self.new_az_loa.set(0.5)
if ballast_exist:
self.new_fraction_bal.set(1 / fls_exist.count(True))
self.new_weibull_bal.set(0.8)
self.new_period_bal.set(8)
self.new_corr_loc_bal.set(0.5)
self.new_az_bal.set(0.5)
if part_exist:
self.new_fraction_prt.set(1 / fls_exist.count(True))
self.new_weibull_prt.set(0.8)
self.new_period_prt.set(8)
self.new_corr_loc_prt.set(0.5)
self.new_az_prt.set(0.5)
else:
fat_prop = self._initial_fatigue_obj.get_fatigue_properties()
self.new_sn_curve.set(fat_prop['SN-curve'])
self.new_k_factor.set(fat_prop['SCF'])
self.new_no_of_cycles.set(fat_prop['n0'])
self.new_design_life.set(fat_prop['Design life'])
self.new_dff.set(fat_prop['DFF'])
if any(fls_exist):
if loaded_exist:
self.new_fraction_loa.set(fat_prop['Fraction'][0])
self.new_weibull_loa.set(fat_prop['Weibull'][0])
self.new_period_loa.set(fat_prop['Period'][0])
self.new_corr_loc_loa.set(fat_prop['CorrLoc'][0])
self.new_az_loa.set(fat_prop['Accelerations'][0])
if ballast_exist:
self.new_fraction_bal.set(fat_prop['Fraction'][1])
self.new_weibull_bal.set(fat_prop['Weibull'][1])
self.new_period_bal.set(fat_prop['Period'][1])
self.new_corr_loc_bal.set(fat_prop['CorrLoc'][1])
self.new_az_bal.set(fat_prop['Accelerations'][1])
if part_exist:
self.new_fraction_prt.set(fat_prop['Fraction'][2])
self.new_weibull_prt.set(fat_prop['Weibull'][2])
self.new_period_prt.set(fat_prop['Period'][2])
self.new_corr_loc_prt.set(fat_prop['CorrLoc'][2])
self.new_az_prt.set(fat_prop['Accelerations'][2])
all_acc = (self.new_az_loa.get(), self.new_az_bal.get(), self.new_az_prt.get())
count = 1
for load in self.load_objects:
if load == None:
continue
press = []
if load.get_limit_state() == 'FLS':
tk.Label(self._frame, text=str(count)+'. '+load.get_name(), font='Verdana 8')\
.place(x=start_x + 5 * dx, y=start_y + (5+count) * dy)
idx = 0
for exist in fls_exist:
if exist:
press.append(round(load.get_calculated_pressure(self.pressure_coords,all_acc[idx],
self._initial_structure_obj.
get_structure_type()), 1))
idx += 1
tk.Label(self._frame, text=press, font='Verdana 8') \
.place(x=start_x + 6.5 * dx, y=start_y + (5 + count) * dy)
count += 1
press = []
for comp in self.comp_objects:
if comp == None:
continue
tk.Label(self._frame, text=str(count) + '. ' +str(comp.get_name()), font='Verdana 8') \
.place(x=start_x + 5 * dx, y=start_y + (5 + count) * dy)
idx = 0
if fls_exist[0] and comp.is_loaded_condition():
press.append(round(comp.get_calculated_pressure(self.pressure_coords, all_acc[0]), 1))
if fls_exist[1] and comp.is_ballast_condition():
press.append(round(comp.get_calculated_pressure(self.pressure_coords, all_acc[1]), 1))
if fls_exist[2] and any([comp.is_loaded_condition(),comp.is_ballast_condition()]):
press.append(round(comp.get_calculated_pressure(self.pressure_coords, all_acc[2]), 1))
tk.Label(self._frame, text=press, font='Verdana 8') \
.place(x=start_x + 6.5 * dx, y=start_y + (5 + count) * dy)
count += 1
tk.Label(self._frame, text='Design life:', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 0*dy)
tk.Label(self._frame, text='Design Fatigue Factor (DFF):', font='Verdana 8 bold') \
.place(x=start_x+ 3*dx , y=start_y + 0*dy)
tk.Label(self._frame, text='SN-curve:', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 1*dy)
tk.Label(self._frame, text='Cycles in return period, n0', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 2*dy)
tk.Label(self._frame, text='Stress Concentration Factor, SCF', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 3*dy)
tk.Label(self._frame, text='Loaded', font='Verdana 8 bold') \
.place(x=start_x+2*dx , y=start_y + 5*dy)
tk.Label(self._frame, text='Ballast', font='Verdana 8 bold') \
.place(x=start_x+3*dx , y=start_y + 5*dy)
tk.Label(self._frame, text='Part', font='Verdana 8 bold') \
.place(x=start_x+4*dx , y=start_y + 5*dy)
tk.Label(self._frame, text='Defined loads', font='Verdana 8 bold') \
.place(x=start_x+5*dx , y=start_y + 5*dy)
tk.Label(self._frame, text='Resulting pressures', font='Verdana 8 bold') \
.place(x=start_x+6.5*dx , y=start_y + 5*dy)
tk.Label(self._frame, text='Fraction (sum of bal/part/loa is 1)', font='Verdana 8 bold') \
.place(x=start_x, y=start_y + 6 * dy)
tk.Label(self._frame, text='Weibull', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 7*dy)
tk.Label(self._frame, text='Period', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 8*dy)
tk.Label(self._frame, text='Corr. loc.', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 9*dy)
tk.Label(self._frame, text='Accelerations', font='Verdana 8 bold') \
.place(x=start_x , y=start_y + 10*dy)
self.ent_new_design_life.place(x=start_x+2*dx, y=start_y + 0 * dy)
self.ent_dff.place(x=start_x + 5 * dx, y=start_y + 0 * dy)
self.ent_sn_curve.place(x=start_x+2*dx , y=start_y + 1*dy)
self.ent_no_of_cycles .place(x=start_x+2*dx , y=start_y + 2*dy)
self.ent_k_factor.place(x=start_x+2*dx , y=start_y + 3*dy)
self.ent_fraction_loa.place(x=start_x + 2 * dx, y=start_y + 6 * dy)
self.ent_fraction_bal.place(x=start_x+3*dx , y=start_y + 6*dy)
self.ent_fraction_prt.place(x=start_x+4*dx , y=start_y + 6*dy)
self.ent_weibull_loa.place(x=start_x + 2 * dx, y=start_y + 7 * dy)
self.ent_weibull_bal.place(x=start_x+3*dx , y=start_y + 7*dy)
self.ent_weibull_prt.place(x=start_x+4*dx , y=start_y + 7*dy)
self.ent_period_loa.place(x=start_x + 2 * dx, y=start_y + 8 * dy)
self.ent_period_bal.place(x=start_x+3*dx, y=start_y + 8*dy)
self.ent_period_prt.place(x=start_x+4*dx, y=start_y + 8*dy)
self.ent_corr_loc_loa.place(x=start_x + 2 * dx, y=start_y + 9 * dy)
self.ent_corr_loc_bal.place(x=start_x+3*dx, y=start_y + 9*dy)
self.ent_corr_loc_prt.place(x=start_x+4*dx, y=start_y + 9*dy)
self.ent_acc_loa.place(x=start_x + 2 * dx, y=start_y + 10 * dy)
self.ent_acc_bal.place(x=start_x + 3 * dx, y=start_y + 10 * dy)
self.ent_acc_prt.place(x=start_x + 4 * dx, y=start_y + 10 * dy)
# if not loaded_exist:
# loa_fr.place(x=start_x + 2 * dx, y=start_y + 6 * dy)
# elif not ballast_exist:
# bal_fr.place(x=start_x + 3 * dx, y=start_y + 6 * dy)
# elif not part_exist:
# prt_fr.place(x=start_x + 4 * dx, y=start_y + 6 * dy)
self._close_and_save = tk.Button(self._frame, text='Return fatigue parameters',
command=self.save_and_close, bg='green', font='Verdana 15', fg='yellow')
self._close_and_save.place(x=start_x + dx, y=start_y + dy * 15)
def get_pressure_point_coord_from_two_points(self,p1,p2):
''' Finding the coordinates to use in pressure calculations '''
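# The pressure point is taken 1/3 of the way along the line, measured from the end
# point with the lowest y-coordinate.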
if p1[1] <= p2[1]:
start_point = p1
end_point = p2
else:
start_point = p2
end_point = p1
vector = [end_point[0]-start_point[0], end_point[1]-start_point[1]]
return start_point[0]+vector[0]*1/3, start_point[1]+vector[1]*1/3
def change_sn_curve(self,event):
'''
Action when changing the SN-curve
:return:
'''
self.new_sn_curve.set(event)
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
# order of return is (Loaded,Ballast,Part)
temp_dict = {'SN-curve':self.new_sn_curve.get(),
'SCF':self.new_k_factor.get(),
'Design life':self.new_design_life.get(),
'n0':self.new_no_of_cycles.get(),
'Weibull':(self.new_weibull_loa.get(),self.new_weibull_bal.get(),self.new_weibull_prt.get()),
'Period':(self.new_period_loa.get(),self.new_period_bal.get(),self.new_period_prt.get()),
'Fraction':(self.new_fraction_loa.get(),self.new_fraction_bal.get(),self.new_fraction_prt.get()),
'CorrLoc':(self.new_corr_loc_loa.get(),self.new_corr_loc_bal.get(),self.new_corr_loc_prt.get()),
'Order':('Loaded','Ballast','Part'),
'Accelerations':(self.new_az_loa.get(),self.new_az_bal.get(),self.new_az_prt.get()),
'DFF': self.new_dff.get()}
self.app.on_close_fatigue_window(temp_dict)
self._frame.destroy()
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateFatigueWindow(root,app=None)
root.mainloop()
# --- end of any_files/fatigue_window.py ---
import tkinter as tk
from _tkinter import TclError
from tkinter.ttk import Progressbar
from tkinter import messagebox
import pickle
from tkinter.filedialog import askopenfilenames
from multiprocessing import cpu_count
from tkinter import filedialog
from matplotlib import pyplot as plt
try:
import any_files.main_application
import any_files.optimize as op
import any_files.example_data as test
from any_files.calc_structure import *
import any_files.calc_structure
from any_files.helper import *
except ModuleNotFoundError:
import ANYstructure.any_files.main_application
import ANYstructure.any_files.optimize as op
import ANYstructure.any_files.example_data as test
from ANYstructure.any_files.calc_structure import *
import ANYstructure.any_files.calc_structure
from ANYstructure.any_files.helper import *
class CreateOptGeoWindow():
'''
This class initiates the geometry optimization (plate field span optimizer) window.
'''
def __init__(self, master, app=None):
super(CreateOptGeoWindow, self).__init__()
if __name__ == '__main__':
self._load_objects = {}
self._load_comb_dict = {}
self._line_dict = test.get_line_dict()
self._load_count = 0
self._point_dict = test.get_point_dict()
self._canvas_scale = 20
self._line_to_struc = test.get_line_to_struc()
self._opt_frames = {}
self._active_points = ['point1','point4','point8','point5']
self._ML_buckling = dict() # Buckling machine learning algorithm
for name, file_base in zip(['cl SP buc int predictor', 'cl SP buc int scaler',
'cl SP ult int predictor', 'cl SP ult int scaler',
'cl SP buc GLGT predictor', 'cl SP buc GLGT scaler',
'cl SP ult GLGT predictor', 'cl SP ult GLGT scaler',
'cl UP buc int predictor', 'cl UP buc int scaler',
'cl UP ult int predictor', 'cl UP ult int scaler',
'cl UP buc GLGT predictor', 'cl UP buc GLGT scaler',
'cl UP ult GLGT predictor', 'cl UP ult GLGT scaler',
'CSR predictor', 'CSR scaler'
],
["ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_UP",
"CL_CSR-Tank_req_cl_predictor",
"CL_CSR-Tank_req_cl_UP_scaler",
"CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_predictor",
"CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_SP_scaler"]):
self._ML_buckling[name] = None
if os.path.isfile(file_base + '.pickle'):
file = open(file_base + '.pickle', 'rb')
self._ML_buckling[name] = pickle.load(file)
file.close()
else:
self.app = app
self._load_objects = app._load_dict
self._load_comb_dict = app._new_load_comb_dict
self._line_dict = app._line_dict
self._load_count = 0
self._point_dict = app._point_dict
self._canvas_scale = app._canvas_scale
self._line_to_struc = app._line_to_struc
self._opt_frames = {}
self._active_points = []
self._root_dir = app._root_dir
self._ML_buckling = app._ML_buckling
self._opt_structure = {}
self._opt_frames_obj = []
self._frame = master
self._frame.wm_title("Optimize structure")
self._frame.geometry('1800x950')
self._frame.grab_set()
self._canvas_origo = (50, 720 - 50)
self._canvas_base_origo = self._canvas_origo
self._canvas_draw_origo = list(self._canvas_base_origo)
self._previous_drag_mouse = list(self._canvas_draw_origo)
self._active_lines = []
self._add_to_lines = True
self._lines_add_to_load = []
self._active_point = None
self._point_is_active = False
# ----------------------------------COPIED FROM OPTIMIZE_WINDOW----------------------------------------------- #
self._opt_resutls = {}
self._geo_results = None
self._opt_actual_running_time = tk.Label(self._frame, text='')
tk.Frame(self._frame, width=770, height=5, bg="grey", colormap="new").place(x=20, y=95)
tk.Frame(self._frame, width=770, height=5, bg="grey", colormap="new").place(x=20, y=135)
algorithms = ('anysmart',' ')
tk.Label(self._frame, text='-- Plate field span optimizer for plate fields separated by frames. --',
font='Verdana 15 bold').place(x=10, y=10)
# upper and lower bounds for optimization
# [0.6, 0.012, 0.3, 0.01, 0.1, 0.01]
self._new_spacing_upper = tk.DoubleVar()
self._new_spacing_lower = tk.DoubleVar()
self._new_pl_thk_upper = tk.DoubleVar()
self._new_pl_thk_lower = tk.DoubleVar()
self._new_web_h_upper = tk.DoubleVar()
self._new_web_h_lower = tk.DoubleVar()
self._new_web_thk_upper = tk.DoubleVar()
self._new_web_thk_lower = tk.DoubleVar()
self._new_fl_w_upper = tk.DoubleVar()
self._new_fl_w_lower = tk.DoubleVar()
self._new_fl_thk_upper = tk.DoubleVar()
self._new_fl_thk_lower = tk.DoubleVar()
self._new_span = tk.DoubleVar()
self._new_width_lg = tk.DoubleVar()
self._new_algorithm = tk.StringVar()
self._new_algorithm_random_trials = tk.IntVar()
self._new_delta_spacing = tk.DoubleVar()
self._new_delta_pl_thk = tk.DoubleVar()
self._new_delta_web_h = tk.DoubleVar()
self._new_delta_web_thk = tk.DoubleVar()
self._new_delta_fl_w = tk.DoubleVar()
self._new_delta_fl_thk = tk.DoubleVar()
self._new_swarm_size = tk.IntVar()
self._new_omega = tk.DoubleVar()
self._new_phip = tk.DoubleVar()
self._new_phig = tk.DoubleVar()
self._new_maxiter = tk.IntVar()
self._new_minstep = tk.DoubleVar()
self._new_minfunc = tk.DoubleVar()
self._new_processes = tk.IntVar()
self._new_opt_girder_thk = tk.DoubleVar()
self._new_opt_girder_stf_web_h = tk.DoubleVar()
self._new_opt_girder_stf_web_thk = tk.DoubleVar()
self._new_opt_girder_stf_flange_b = tk.DoubleVar()
self._new_opt_girder_stf_flange_thk = tk.DoubleVar()
self._new_opt_girder_scale_high = tk.DoubleVar()
self._new_opt_girder_scale_low = tk.DoubleVar()
self._new_opt_span_max = tk.DoubleVar()
self._new_opt_span_min = tk.DoubleVar()
self._new_option_fraction = tk.IntVar()
self._new_option_panel = tk.IntVar()
ent_w = 10
self._ent_spacing_upper = tk.Entry(self._frame, textvariable=self._new_spacing_upper, width=ent_w)
self._ent_spacing_lower = tk.Entry(self._frame, textvariable=self._new_spacing_lower, width=ent_w)
self._ent_pl_thk_upper = tk.Entry(self._frame, textvariable=self._new_pl_thk_upper, width=ent_w)
self._ent_pl_thk_lower = tk.Entry(self._frame, textvariable=self._new_pl_thk_lower, width=ent_w)
self._ent_web_h_upper = tk.Entry(self._frame, textvariable=self._new_web_h_upper, width=ent_w)
self._ent_web_h_lower = tk.Entry(self._frame, textvariable=self._new_web_h_lower, width=ent_w)
self._ent_web_thk_upper = tk.Entry(self._frame, textvariable=self._new_web_thk_upper, width=ent_w)
self._ent_web_thk_lower = tk.Entry(self._frame, textvariable=self._new_web_thk_lower, width=ent_w)
self._ent_fl_w_upper = tk.Entry(self._frame, textvariable=self._new_fl_w_upper, width=ent_w)
self._ent_fl_w_lower = tk.Entry(self._frame, textvariable=self._new_fl_w_lower, width=ent_w)
self._ent_fl_thk_upper = tk.Entry(self._frame, textvariable=self._new_fl_thk_upper, width=ent_w)
self._ent_fl_thk_lower = tk.Entry(self._frame, textvariable=self._new_fl_thk_lower, width=ent_w)
self._ent_span = tk.Entry(self._frame, textvariable=self._new_span, width=ent_w)
self._ent_width_lg = tk.Entry(self._frame, textvariable=self._new_width_lg, width=ent_w)
self._ent_algorithm = tk.OptionMenu(self._frame, self._new_algorithm, command=self.selected_algorithm,
*algorithms)
self._ent_random_trials = tk.Entry(self._frame, textvariable=self._new_algorithm_random_trials)
self._ent_delta_spacing = tk.Entry(self._frame, textvariable=self._new_delta_spacing, width=ent_w)
self._ent_delta_pl_thk = tk.Entry(self._frame, textvariable=self._new_delta_pl_thk, width=ent_w)
self._ent_delta_web_h = tk.Entry(self._frame, textvariable=self._new_delta_web_h, width=ent_w)
self._ent_delta_web_thk = tk.Entry(self._frame, textvariable=self._new_delta_web_thk, width=ent_w)
self._ent_delta_fl_w = tk.Entry(self._frame, textvariable=self._new_delta_fl_w, width=ent_w)
self._ent_delta_fl_thk = tk.Entry(self._frame, textvariable=self._new_delta_fl_thk, width=ent_w)
pso_width = 10
self._ent_swarm_size = tk.Entry(self._frame, textvariable=self._new_swarm_size, width=pso_width)
self._ent_omega = tk.Entry(self._frame, textvariable=self._new_omega, width=pso_width)
self._ent_phip = tk.Entry(self._frame, textvariable=self._new_phip, width=pso_width)
self._ent_phig = tk.Entry(self._frame, textvariable=self._new_phig, width=pso_width)
self._ent_maxiter = tk.Entry(self._frame, textvariable=self._new_maxiter, width=pso_width)
self._ent_minstep = tk.Entry(self._frame, textvariable=self._new_minstep, width=pso_width)
self._ent_minfunc = tk.Entry(self._frame, textvariable=self._new_minfunc, width=pso_width)
self._ent_opt_girder_thk = tk.Entry(self._frame, textvariable=self._new_opt_girder_thk, width=ent_w)
self._ent_opt_girder_stf_web_h = tk.Entry(self._frame, textvariable=self._new_opt_girder_stf_web_h,
width=ent_w)
self._ent_opt_girder_stf_web_thk = tk.Entry(self._frame, textvariable=self._new_opt_girder_stf_web_thk,
width=ent_w)
self._ent_opt_girder_stf_fl_b = tk.Entry(self._frame, textvariable=self._new_opt_girder_stf_flange_b,
width=ent_w)
self._ent_opt_girder_stf_fl_thk = tk.Entry(self._frame, textvariable=self._new_opt_girder_stf_flange_thk,
width=ent_w)
self._ent_opt_girder_scale_high = tk.Entry(self._frame, textvariable=self._new_opt_girder_scale_high,
width=int(ent_w/2))
self._ent_opt_girder_scale_low = tk.Entry(self._frame, textvariable=self._new_opt_girder_scale_low,
width=int(ent_w/2))
self._ent_opt_max_span = tk.Entry(self._frame, textvariable=self._new_opt_span_max,
width=int(ent_w/2))
self._ent_opt_min_span = tk.Entry(self._frame, textvariable=self._new_opt_span_min,
width=int(ent_w/2))
start_x, start_y, dx, dy = 20, 70, 100, 40
tk.Label(self._frame, text='Processes\n (CPUs)', font='Verdana 9 bold', bg = 'silver')\
.place(x=start_x + 8 * dx, y=start_y + 0.5 * dy)
tk.Entry(self._frame, textvariable=self._new_processes, width = 12, bg = 'silver')\
.place(x=start_x + 8 * dx, y=start_y + 1.4 * dy)
self._prop_canvas_dim = (500, 450)
self._draw_scale = 500
self._canvas_opt = tk.Canvas(self._frame, width=self._prop_canvas_dim[0], height=self._prop_canvas_dim[1],
background='azure', relief='groove', borderwidth=2)
self._canvas_opt.place(x=start_x + 10.5 * dx, y=start_y + 3.5 * dy)
self._select_canvas_dim = (1000, 720)
self._canvas_select = tk.Canvas(self._frame, width=self._select_canvas_dim[0],
height=self._select_canvas_dim[1],
background='azure', relief='groove', borderwidth=2)
self._canvas_select.place(x=start_x + 0 * dx, y=start_y + 3.5 * dy)
# Labels for the pso
self._lb_swarm_size = tk.Label(self._frame, text='swarm size')
self._lb_omega = tk.Label(self._frame, text='omega')
self._lb_phip = tk.Label(self._frame, text='phip')
self._lb_phig = tk.Label(self._frame, text='phig')
self._lb_maxiter = tk.Label(self._frame, text='maxiter')
self._lb_minstep = tk.Label(self._frame, text='minstep')
self._lb_minfunc = tk.Label(self._frame, text='minfunc')
tk.Label(self._frame, text='Upper bounds [mm]', font='Verdana 9').place(x=start_x, y=start_y)
tk.Label(self._frame, text='Iteration delta [mm]', font='Verdana 9').place(x=start_x, y=start_y + dy)
tk.Label(self._frame, text='Lower bounds [mm]', font='Verdana 9').place(x=start_x, y=start_y + 2 * dy)
tk.Label(self._frame, text='Spacing [mm]', font='Verdana 7 bold').place(x=start_x + 1.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Plate thk. [mm]', font='Verdana 7 bold').place(x=start_x + 2.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Web height [mm]', font='Verdana 7 bold').place(x=start_x + 3.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Web thk. [mm]', font='Verdana 7 bold').place(x=start_x + 4.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Flange width [mm]', font='Verdana 7 bold').place(x=start_x + 5.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Flange thk. [mm]', font='Verdana 7 bold').place(x=start_x + 6.97 * dx,
y=start_y - 0.6 * dy)
tk.Label(self._frame, text='Estimated running time for algorithm not calculated.',
font='Verdana 9 bold').place(x=start_x, y=start_y + 2.8 * dy)
tk.Label(self._frame, text='- Harmonize stiffener spacing for section.', font='Verdana 9 bold')\
.place(x=start_x + 5*dx, y=start_y + 2.8 * dy)
# self._runnig_time_label = tk.Label(self._frame, text='', font='Verdana 9 bold')
# self._runnig_time_label.place(x=start_x + 2.7 * dx, y=start_y + 2.8 * dy)
# tk.Label(self._frame, text='seconds ', font='Verdana 9 bold').place(x=start_x + 3.3 * dx, y=start_y + 2.8 * dy)
self._result_label = tk.Label(self._frame, text='', font='Verdana 9 bold')
self._result_label.place(x=start_x, y=start_y + 3.4 * dy)
self._ent_spacing_upper.place(x=start_x + dx * 2, y=start_y)
self._ent_delta_spacing.place(x=start_x + dx * 2, y=start_y + dy)
self._ent_spacing_lower.place(x=start_x + dx * 2, y=start_y + 2 * dy)
self._ent_pl_thk_upper.place(x=start_x + dx * 3, y=start_y)
self._ent_delta_pl_thk.place(x=start_x + dx * 3, y=start_y + dy)
self._ent_pl_thk_lower.place(x=start_x + dx * 3, y=start_y + 2 * dy)
self._ent_web_h_upper.place(x=start_x + dx * 4, y=start_y)
self._ent_delta_web_h.place(x=start_x + dx * 4, y=start_y + dy)
self._ent_web_h_lower.place(x=start_x + dx * 4, y=start_y + 2 * dy)
self._ent_web_thk_upper.place(x=start_x + dx * 5, y=start_y)
self._ent_delta_web_thk.place(x=start_x + dx * 5, y=start_y + dy)
self._ent_web_thk_lower.place(x=start_x + dx * 5, y=start_y + 2 * dy)
self._ent_fl_w_upper.place(x=start_x + dx * 6, y=start_y)
self._ent_delta_fl_w.place(x=start_x + dx * 6, y=start_y + dy)
self._ent_fl_w_lower.place(x=start_x + dx * 6, y=start_y + 2 * dy)
self._ent_fl_thk_upper.place(x=start_x + dx * 7, y=start_y)
self._ent_delta_fl_thk.place(x=start_x + dx * 7, y=start_y + dy)
self._ent_fl_thk_lower.place(x=start_x + dx * 7, y=start_y + 2 * dy)
# setting default values
init_dim = float(100) # mm
init_thk = float(5) # mm
self._new_delta_spacing.set(init_dim)
self._new_delta_pl_thk.set(init_thk)
self._new_delta_web_h.set(init_dim)
self._new_delta_web_thk.set(init_thk)
self._new_delta_fl_w.set(init_dim)
self._new_delta_fl_thk.set(init_thk)
self._new_spacing_upper.set(round(800, 5))
self._new_spacing_lower.set(round(600, 5))
self._new_pl_thk_upper.set(round(25, 5))
self._new_pl_thk_lower.set(round(10, 5))
self._new_web_h_upper.set(round(500, 5))
self._new_web_h_lower.set(round(300, 5))
self._new_web_thk_upper.set(round(25, 5))
self._new_web_thk_lower.set(round(10, 5))
self._new_fl_w_upper.set(round(250, 5))
self._new_fl_w_lower.set(round(50, 5))
self._new_fl_thk_upper.set(round(30, 5))
self._new_fl_thk_lower.set(round(10, 5))
self._new_algorithm.set('anysmart')
self._new_algorithm_random_trials.set(10000)
self._new_processes.set(max(cpu_count() - 1, 1))
self._new_opt_girder_thk.set(0.018)
self._new_opt_girder_stf_web_h.set(0.250)
self._new_opt_girder_stf_web_thk.set(0.015)
self._new_opt_girder_stf_flange_b.set(0)
self._new_opt_girder_stf_flange_thk.set(0)
self._new_opt_girder_scale_high.set(1.1)
self._new_opt_girder_scale_low.set(0.9)
self._new_opt_span_max.set(6)
self._new_opt_span_min.set(2)
self._new_swarm_size.set(100)
self._new_omega.set(0.5)
self._new_phip.set(0.5)
self._new_phig.set(0.5)
self._new_maxiter.set(100)
self._new_minstep.set(1e-8)
self._new_minfunc.set(1e-8)
# self._new_delta_spacing.trace('w', self.update_running_time)
# self._new_delta_pl_thk.trace('w', self.update_running_time)
# self._new_delta_web_h.trace('w', self.update_running_time)
# self._new_delta_web_thk.trace('w', self.update_running_time)
# self._new_delta_fl_w.trace('w', self.update_running_time)
# self._new_delta_fl_thk.trace('w', self.update_running_time)
# self._new_spacing_upper.trace('w', self.update_running_time)
# self._new_spacing_lower.trace('w', self.update_running_time)
# self._new_pl_thk_upper.trace('w', self.update_running_time)
# self._new_pl_thk_lower.trace('w', self.update_running_time)
# self._new_web_h_upper.trace('w', self.update_running_time)
# self._new_web_h_lower.trace('w', self.update_running_time)
# self._new_web_thk_upper.trace('w', self.update_running_time)
# self._new_web_thk_lower.trace('w', self.update_running_time)
# self._new_fl_w_upper.trace('w', self.update_running_time)
# self._new_fl_w_lower.trace('w', self.update_running_time)
# self._new_fl_thk_upper.trace('w', self.update_running_time)
# self._new_fl_thk_lower.trace('w', self.update_running_time)
# self._new_algorithm_random_trials.trace('w', self.update_running_time)
# self._new_algorithm.trace('w', self.update_running_time)
self.running_time_per_item = 4e-05
#self._runnig_time_label.config(text=str(self.get_running_time()))
self._ent_algorithm.place(x=start_x + dx * 10, y=start_y + dy)
self.algorithm_random_label = tk.Label(self._frame, text='Number of trials')
tk.Button(self._frame, text='algorithm information', command=self.algorithm_info, bg='white') \
.place(x=start_x + dx * 10, y=start_y + dy * 2)
self.run_button = tk.Button(self._frame, text='RUN OPTIMIZATION!', command=self.run_optimizaion, bg='red',
font='Verdana 10', fg='Yellow')
self.run_button.place(x=start_x + dx * 10, y=start_y)
self._opt_actual_running_time.place(x=start_x + dx * 8, y=start_y + dy * 1.5)
# self.close_and_save = tk.Button(self._frame, text='Return and replace with selected optimized structure',
# command=self.save_and_close, bg='green', font='Verdana 10 bold', fg='yellow')
# self.close_and_save.place(x=start_x + dx * 10, y=10)
tk.Button(self._frame, text='Open predefined stiffeners example',
command=self.open_example_file, bg='white', font='Verdana 10')\
.place(x=start_x+dx*10,y=10)
# Selection of constraints
self._new_check_sec_mod = tk.BooleanVar()
self._new_check_min_pl_thk = tk.BooleanVar()
self._new_check_shear_area = tk.BooleanVar()
self._new_check_buckling = tk.BooleanVar()
self._new_check_fatigue = tk.BooleanVar()
self._new_check_slamming = tk.BooleanVar()
self._new_check_local_buckling = tk.BooleanVar()
self._new_harmonize_spacing = tk.BooleanVar()
self._new_check_buckling_ml_cl = tk.BooleanVar()
self._new_check_sec_mod.set(True)
self._new_check_min_pl_thk.set(True)
self._new_check_shear_area.set(True)
self._new_check_buckling.set(True)
self._new_check_fatigue.set(True)
self._new_check_slamming.set(True)
self._new_check_local_buckling.set(True)
self._new_option_fraction.set(None)
self._new_option_panel.set(None)
self._new_harmonize_spacing.set(False)
self._new_check_buckling_ml_cl.set(False)
self._new_check_buckling_ml_cl.trace('w', self.update_running_time)
start_y, start_x, dy = 570, 100, 25
tk.Label(self._frame,text='Check for minimum section modulus').place(x=start_x+dx*9.7,y=start_y+4*dy)
tk.Label(self._frame, text='Check for minimum plate thk.').place(x=start_x+dx*9.7,y=start_y+5*dy)
tk.Label(self._frame, text='Check for minimum shear area').place(x=start_x+dx*9.7,y=start_y+6*dy)
tk.Label(self._frame, text='Check for buckling (RP-C201)').place(x=start_x+dx*9.7,y=start_y+7*dy)
tk.Label(self._frame, text='Check for fatigue (RP-C203)').place(x=start_x + dx * 9.7, y=start_y + 8 * dy)
tk.Label(self._frame, text='Check for bow slamming').place(x=start_x + dx * 9.7, y=start_y + 9 * dy)
tk.Label(self._frame, text='Check for local stf. buckling').place(x=start_x + dx * 9.7, y=start_y + 10 * dy)
tk.Label(self._frame, text='Check for buckling, ML-CL').place(x=start_x + dx * 9.7, y=start_y + 11 * dy)
tk.Label(self._frame, text='Frame (girder data) for weight calculation:', font = 'Verdana 9 bold')\
.place(x=start_x + dx * 13,
y=start_y + 4 * dy)
tk.Label(self._frame, text='Girder thickness').place(x=start_x + dx * 13, y=start_y + 5 * dy)
tk.Label(self._frame, text='Stiffener height').place(x=start_x + dx * 13, y=start_y + 6 * dy)
tk.Label(self._frame, text='Stiffener thickness').place(x=start_x + dx * 13, y=start_y + 7 * dy)
tk.Label(self._frame, text='Stf. flange width').place(x=start_x + dx * 13, y=start_y + 8 * dy)
tk.Label(self._frame, text='Stf. flange thickness').place(x=start_x + dx * 13, y=start_y + 9 * dy)
tk.Label(self._frame, text='For weight calculation of girder: Max span mult / Min span mult')\
.place(x=start_x + dx * 13,y=start_y + 10 * dy)
tk.Label(self._frame, text='Maximum span / Minimum span ->')\
.place(x=start_x + dx * 13,y=start_y + 12 * dy)
self._ent_opt_girder_thk.place(x=start_x + dx * 15, y=start_y + 5 * dy)
self._ent_opt_girder_stf_web_h.place(x=start_x + dx * 15, y=start_y + 6 * dy)
self._ent_opt_girder_stf_web_thk.place(x=start_x + dx * 15, y=start_y + 7 * dy)
self._ent_opt_girder_stf_fl_b.place(x=start_x + dx * 15, y=start_y + 8 * dy)
self._ent_opt_girder_stf_fl_thk.place(x=start_x + dx * 15, y=start_y + 9 * dy)
self._ent_opt_girder_scale_high.place(x=start_x + dx * 15, y=start_y + 11 * dy)
self._ent_opt_girder_scale_low.place(x=start_x + dx * 15.5, y=start_y + 11 * dy)
self._ent_opt_max_span.place(x=start_x + dx * 15, y=start_y + 12 * dy)
self._ent_opt_min_span.place(x=start_x + dx * 15.5, y=start_y + 12 * dy)
tk.Checkbutton(self._frame,variable=self._new_check_sec_mod).place(x=start_x+dx*12,y=start_y+4*dy)
tk.Checkbutton(self._frame, variable=self._new_check_min_pl_thk).place(x=start_x+dx*12,y=start_y+5*dy)
tk.Checkbutton(self._frame, variable=self._new_check_shear_area).place(x=start_x+dx*12,y=start_y+6*dy)
tk.Checkbutton(self._frame, variable=self._new_check_buckling).place(x=start_x+dx*12,y=start_y+7*dy)
tk.Checkbutton(self._frame, variable=self._new_check_fatigue).place(x=start_x + dx * 12, y=start_y + 8 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_slamming).place(x=start_x + dx * 12, y=start_y + 9 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_local_buckling).place(x=start_x + dx * 12,
y=start_y + 10 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_buckling_ml_cl).place(x=start_x + dx * 12,
y=start_y + 11 * dy)
tk.Checkbutton(self._frame, variable=self._new_harmonize_spacing).place(x=start_x + 3.9*dx, y=180)
# Stress scaling
self._new_fup = tk.DoubleVar()
self._new_fup.set(0.5)
self._new_fdwn = tk.DoubleVar()
self._new_fdwn.set(1)
tk.Label(self._frame, text='Factor when scaling stresses up, fup')\
.place(x=start_x + dx * 9.7, y=start_y + 12 * dy)
ent_fup = tk.Entry(self._frame, textvariable=self._new_fup, width = 5)
ent_fup.place(x=start_x + dx * 12, y=start_y + 12 * dy)
tk.Label(self._frame, text='Factor when scaling stresses down, fdown')\
.place(x=start_x + dx * 9.7, y=start_y + 13 * dy)
ent_fdwn = tk.Entry(self._frame, textvariable=self._new_fdwn, width = 5)
ent_fdwn.place(x=start_x + dx * 12, y=start_y + 13 * dy)
self._toggle_btn = tk.Button(self._frame, text="Iterate predefiened stiffeners", relief="raised",
command=self.toggle, bg = 'salmon')
self._toggle_btn.place(x=start_x+dx*10.5, y=start_y - dy * 16.8)
self._toggle_object, self._filez = None, None
self._options_fractions = (None, )
self._options_panels = (None, )
tk.Label(self._frame, text='Select number of panels:').place(x=start_x+dx*12, y=start_y - dy * 20.5)
tk.Label(self._frame, text='Select panel to plot: ').place(x=start_x+dx*12, y=start_y - dy * 19.5)
self._ent_option_fractions = tk.OptionMenu(self._frame, self._new_option_fraction, *self._options_fractions,
command=self.get_plate_field_options)
self._ent_option_field = tk.OptionMenu(self._frame, self._new_option_panel, *self._options_panels,
command=self.get_plate_field_options)
self._option_fractions_place = [start_x+dx*13.5, start_y - dy * 20.5]
self._options_panels_place = [start_x+dx*13.5, start_y - dy * 19.5]
self._ent_option_fractions.place(x=self._option_fractions_place[0], y=self._option_fractions_place[1])
self._ent_option_field.place(x=self._options_panels_place[0], y=self._options_panels_place[1])
self.run_results = tk.Button(self._frame,text='show calculated', command=self.plot_results, bg='white',
font='Verdana 10',fg='black')
self.run_results.place(x=start_x+dx*13, y=start_y - dy * 18)
self.run_results_prev = tk.Button(self._frame,text='Show previous\n'
'results', command=self.show_previous_results, bg='white',
font='Verdana 10',fg='black')
self.run_results_prev.place(x=start_x+dx*15, y=start_y - dy * 20)
# ----------------------------------END OF OPTIMIZE SINGLE COPY-----------------------------------------------
self.progress_count = tk.IntVar()
self.progress_count.set(0)
self.progress_bar = Progressbar(self._frame, orient="horizontal",length=200, mode="determinate",
variable=self.progress_count)
#self.progress_bar.place(x=start_x+dx*10.5,y=start_y-dy*16.5)
self._active_lines = []
self.controls()
self.draw_select_canvas()
# if __name__ == '__main__':
# self.run_optimizaion(load_pre = True, save_results=True)
def selected_algorithm(self, event):
'''
Action when selecting an algorithm in the option menu.
:return:
'''
start_x, start_y, dx, dy = 20, 100, 100, 40
if self._new_algorithm.get() == 'random' or self._new_algorithm.get() == 'random_no_delta':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
self._ent_random_trials.place(x=start_x + dx * 11.3, y=start_y + 1.2 * dy)
self.algorithm_random_label.place(x=start_x + dx * 11.3, y=start_y + 0.5 * dy)
elif self._new_algorithm.get() == 'anysmart' or self._new_algorithm.get() == 'anydetail':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
elif self._new_algorithm.get() == 'pso':
y_place_label = 11.2
y_place = 12.2
self._ent_random_trials.place_forget()
start_x = 150
self._lb_swarm_size.place(x=start_x + dx * 11, y=start_y - 1 * dy)
self._lb_omega.place(x=start_x + dx * 11, y=start_y - 0 * dy)
self._lb_phip.place(x=start_x + dx * 11, y=start_y + 1 * dy)
self._lb_phig.place(x=start_x + dx * 11, y=start_y + 2 * dy)
self._lb_maxiter.place(x=start_x + dx * 14, y=start_y - 1 * dy)
self._lb_minstep.place(x=start_x + dx * 14, y=start_y + 0 * dy)
self._lb_minfunc.place(x=start_x + dx * 14, y=start_y + 1 * dy)
self._ent_swarm_size.place(x=start_x + dx * 12, y=start_y - 1 * dy)
self._ent_omega.place(x=start_x + dx * 12, y=start_y - 0 * dy)
self._ent_phip.place(x=start_x + dx * 12, y=start_y + 1 * dy)
self._ent_phig.place(x=start_x + dx * 12, y=start_y + 2 * dy)
self._ent_maxiter.place(x=start_x + dx * 15, y=start_y - 1 * dy)
self._ent_minstep.place(x=start_x + dx * 15, y=start_y + 0 * dy)
self._ent_minfunc.place(x=start_x + dx * 15, y=start_y + 1 * dy)
def show_previous_results(self):
# if type(self._geo_results) is not list():
# if self._geo_results is not None:
# return
# else:
# if self._geo_results[0] is not None:
# return
self.draw_select_canvas(opt_results=self._geo_results)
def run_optimizaion(self, load_pre = False, save_results = False, harmonize = False):
'''
Function called when pressing the optimization button inside this window.
:return:
'''
frames, distances = self.opt_create_frames(self.opt_get_fractions())
self.opt_create_main_structure(frames,
self._active_points[0], self._active_points[1],
self._active_points[2], self._active_points[3])
contraints = (self._new_check_sec_mod.get(), self._new_check_min_pl_thk.get(),
self._new_check_shear_area.get(), self._new_check_buckling.get(),
self._new_check_fatigue.get(), self._new_check_slamming.get(),
self._new_check_local_buckling.get(), False, self._new_check_buckling_ml_cl.get(), False)
self.pso_parameters = (self._new_swarm_size.get(), self._new_omega.get(), self._new_phip.get(),
self._new_phig.get(),self._new_maxiter.get(), self._new_minstep.get(),
self._new_minfunc.get())
opt_girder_prop = (self._new_opt_girder_thk.get(), self._new_opt_girder_stf_web_h.get(),
self._new_opt_girder_stf_web_thk.get(), self._new_opt_girder_stf_flange_b.get(),
self._new_opt_girder_stf_flange_thk.get(),self._new_opt_girder_scale_high.get(),
self._new_opt_girder_scale_low.get())
min_max_span = (self._new_opt_span_min.get(), self._new_opt_span_max.get())
init_objects, fatigue_objects, fat_press_ext_int, slamming_pressures, lateral_press, fatigue_objects, \
slamming_press = [list() for dummy in range(7)]
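# Note: 'fatigue_objects' appears twice in the unpacking above; the name ends up bound
# to the later (sixth) list.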
broke = False
pressure_side = 'both sides' # default value
for line,coord in self._opt_structure.items():
if self.opt_create_struc_obj(self._opt_structure[line]) is None:
broke = True
break
else:
init_objects.append(self.opt_create_struc_obj(self._opt_structure[line])[0])
fat_obj_single = self.opt_create_struc_obj(self._opt_structure[line])[2]
fatigue_objects.append(fat_obj_single)
if __name__ == '__main__':
import example_data as ex
lateral_press.append(0.2) # for testing
slamming_press.append(0)
fatigue_objects.append(ex.get_fatigue_object())
for pressure in ex.get_geo_opt_fat_press():
fat_press_ext_int.append(((pressure['p_ext']['loaded'], pressure['p_ext']['ballast'],
pressure['p_ext']['part']),
(pressure['p_int']['loaded'], pressure['p_int']['ballast'],
pressure['p_int']['part'])))
pressure_side = 'both sides'
else:
p1, p2 = self._opt_structure[line]
# Check if line is horizontal or vertical
if p2[0] == p1[0]: # Vertical
to_find = [p2[0], min(p2[1], p1[1]) + abs((p2[1]-p1[1])*0.5)]
elif p2[1] == p1[1]: # Horizontal
to_find = [min(p2[0], p1[0]) + abs((p2[0]-p1[0])*0.5), p2[1]]
else: # Other orientations
to_find = [min(p2[0],p1[0])+abs((p2[0]-p1[0])*0.5), min(p2[1],p1[1])+abs((p2[1]-p1[1])*0.5)]
# Taking properties from the closest line.
closest_line = self.opt_find_closest_orig_line(to_find)
pressure_side = self._line_to_struc[closest_line].overpressure_side
#print('Closest line', closest_line, p1, p2, to_find)
gotten_lat_press = self.app.get_highest_pressure(closest_line)
lateral_press.append(gotten_lat_press['normal'] / 1e6)
slamming_press.append(gotten_lat_press['slamming'])
if fat_obj_single is not None:
fat_press_single = self.app.get_fatigue_pressures(closest_line, fat_obj_single.get_accelerations())
fat_press_tuple = ((fat_press_single['p_ext']['loaded'], fat_press_single['p_ext']['ballast'],
fat_press_single['p_ext']['part']),
(fat_press_single['p_int']['loaded'], fat_press_single['p_int']['ballast'],
fat_press_single['p_int']['part']))
fat_press_ext_int.append(fat_press_tuple)
else:
fat_press_ext_int.append(((0, 0, 0), (0, 0,0)))
# except AttributeError:
# print('AttributeError')
# fat_press_ext_int.append(None)
if broke:
messagebox.showinfo(title='Selection error.',
message='This field cannot be subdivided, or no loads are defined for it.')
return None
if not load_pre:
min_var = self.get_lower_bounds()
max_var = self.get_upper_bounds()
deltas = self.get_deltas()
spacings = np.arange(min_var[0], max_var[0],
deltas[0])
resulting_geo = list()
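# Harmonized spacing: run one optimization per candidate spacing (lower/upper spacing bound
# locked to that value) and afterwards keep, for every fraction, the result with the lowest weight.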
if self._new_harmonize_spacing.get():
geo_results = dict()
for spacing in spacings:
this_min_var = copy.deepcopy(min_var)
this_min_var[0] = spacing
this_max_var = copy.deepcopy(max_var)
this_max_var[0] = spacing
geo_results = op.run_optmizataion(initial_structure_obj=init_objects,min_var=this_min_var,
max_var=this_max_var,lateral_pressure=lateral_press,
deltas=self.get_deltas(), algorithm='anysmart',side=pressure_side,
const_chk = constraints, pso_options = self.pso_parameters,
is_geometric=True,fatigue_obj= fatigue_objects,
fat_press_ext_int=fat_press_ext_int,
min_max_span=min_max_span, tot_len=self.opt_get_length(),
frame_height=self.opt_get_distance(), frame_distance = distances,
predefined_stiffener_iter=self._filez,
processes = self._new_processes.get(),
slamming_press=slamming_press, opt_girder_prop=opt_girder_prop,
fdwn = self._new_fdwn.get(), fup = self._new_fdwn.get(),
ml_algo=self._ML_buckling)
resulting_geo.append(geo_results)
#need to find the lowest
for fraction in resulting_geo[0].keys():
weight = float('inf')
best_idx = None
for idx, geo_res in enumerate(resulting_geo):
this_sub_fraction_weight = geo_res[fraction][0]
if this_sub_fraction_weight < weight:
best_idx = idx
weight = this_sub_fraction_weight
geo_results[fraction] = resulting_geo[best_idx][fraction]
else:
geo_results = op.run_optmizataion(initial_structure_obj=init_objects, min_var=self.get_lower_bounds(),
max_var=self.get_upper_bounds(), lateral_pressure=lateral_press,
deltas=self.get_deltas(), algorithm='anysmart', side=pressure_side,
const_chk=constraints, pso_options=self.pso_parameters,
is_geometric=True, fatigue_obj=fatigue_objects,
fat_press_ext_int=fat_press_ext_int,
min_max_span=min_max_span, tot_len=self.opt_get_length(),
frame_height=self.opt_get_distance(), frame_distance=distances,
predefined_stiffener_iter=self._filez,
processes=self._new_processes.get(),
slamming_press=slamming_press, opt_girder_prop=opt_girder_prop,
fdwn=self._new_fdwn.get(), fup=self._new_fdwn.get())
self._geo_results = geo_results
if len(self._geo_results) != 0:
self._ent_option_fractions.destroy()
self._ent_option_fractions = tk.OptionMenu(self._frame, self._new_option_fraction,
*tuple([val*2 for val in self._geo_results.keys()]),
command=self.get_plate_field_options)
self._ent_option_fractions.place(x=self._option_fractions_place[0], y=self._option_fractions_place[1])
#SAVING RESULTS
if save_results:
with open('geo_opt_2.pickle', 'wb') as file:
pickle.dump(geo_results, file)
else:
with open('geo_opt_2.pickle', 'rb') as file:
self._geo_results = pickle.load(file)
self._ent_option_fractions.destroy()
self._ent_option_fractions = tk.OptionMenu(self._frame, self._new_option_fraction,
*tuple([val*2 for val in self._geo_results.keys()]),
command=self.get_plate_field_options)
self._ent_option_fractions.place(x=self._option_fractions_place[0], y=self._option_fractions_place[1])
save_file, filename = None, None
if save_results:
save_file = filedialog.asksaveasfile(mode="w", defaultextension=".txt", title = 'Save results to file')
if save_file is None: # asksaveasfile returns `None` if the dialog is closed with "cancel".
filename = None
else:
filename = save_file.name
save_file, xplot, yplot = self.draw_result_text(self._geo_results, save_to_file=filename)
self.draw_select_canvas(opt_results=self._geo_results, save_file = save_file)
plt.axes(facecolor='lightslategray')
plt.plot(xplot, yplot,color='yellow', linestyle='solid', marker='o',markerfacecolor='white', markersize=6)
plt.xlabel('Length of plate fields [m]')
plt.ylabel('Weight / max weight')
plt.title('Length of plate fields vs. total weight')
plt.grid()
plt.show()
def opt_get_fractions(self):
''' Finding initial number of fractions '''
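# The target plate field length is about 4 m (3.8 to 4.2 m). Example: a 12 m long selection
# gives init_fractions = 3 and the method returns [1/3, 1/3, 1/3].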
init_fractions = 0
# finding number of fractions
for dummy_i in range(1, 100):
if 3.8 < self.opt_get_length() / dummy_i < 4.2:
init_fractions = dummy_i
break
to_return = []
for dummy_i in range(init_fractions):
to_return.append(1/init_fractions)
return to_return
def opt_create_struc_obj(self,opt_line):
''' Creating a preliminary structure object from the selected optimized line.
The properties of the new line to be optimized are taken from the closest original line.'''
pt1 = opt_line[0]
pt2 = opt_line[1]
vector = [pt2[0] - pt1[0], pt2[1] - pt1[1]]
point = [pt1[0]+vector[0]*0.5, pt1[1]+vector[1]*0.5]
if self.opt_find_closest_orig_line(point) == None:
return None
objects = [copy.deepcopy(x) if x != None else None for x in
self._line_to_struc[self.opt_find_closest_orig_line(point)]]
objects[0].Plate.set_span(dist(pt1,pt2))
objects[0].Stiffener.set_span(dist(pt1, pt2))
return objects
def opt_find_closest_orig_line(self,coord):
''' Find the closest original line to the optimized line.
Used to create initial structure objects. '''
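# Brute-force search: each original line is sampled in 1000 steps; the first line with a sample
# point within 0.1 m of the given coordinate is returned. If that line is of type
# 'GENERAL_INTERNAL_NONWT' or 'FRAME', None is returned instead.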
for key,value in self._line_dict.items():
pt1 = list(self._point_dict['point'+str(value[0])])
pt2 = list(self._point_dict['point'+str(value[1])])
distance = dist(pt2,pt1)
vector = [pt2[0]-pt1[0],pt2[1]-pt1[1]]
current = list(self._point_dict['point'+str(value[0])])
for dummy_i in range(1000):
delta = distance/1000
current[0] += (vector[0]/distance) * delta
current[1] += (vector[1]/distance) * delta
if dist(coord,current) <= 0.1:
if self._line_to_struc[key][0].Plate.get_structure_type() not in ('GENERAL_INTERNAL_NONWT', 'FRAME'):
return key
else:
return None
def opt_get_distance(self):
''' Getting the largest distance between the two lines to be optimized. '''
if len(self._active_points) == 4:
return dist(self._point_dict[self._active_points[0]],self._point_dict[self._active_points[2]])
else:
return None
def opt_get_length(self):
''' Getting the length of the lines to be optimized. '''
if len(self._active_points)==4:
return dist(self._point_dict[self._active_points[0]],self._point_dict[self._active_points[1]])
def opt_get_fraction_bounds(self, max_len = 6, min_len = 2):
''' Return the fraction bounds (basis upper/lower) to be considered. '''
return int(self.opt_get_length()/max_len), int(self.opt_get_length()/min_len)
def opt_create_frames(self,fractions):
''' Creating frames between the two lines to be optimized. '''
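# Transverse frames are placed at the cumulative fraction positions between the two selected
# lines; 'opt_frame_start'/'opt_frame_stop' are the end frames, and the returned distances dict
# holds their lengths (frame heights).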
count = 1
self._opt_frames['opt_frame_start'] = [[self._point_dict[self._active_points[0]][0],
self._point_dict[self._active_points[0]][1]],
[self._point_dict[self._active_points[2]][0],
self._point_dict[self._active_points[2]][1]]]
self._opt_frames['opt_frame_stop'] = [[self._point_dict[self._active_points[1]][0],
self._point_dict[self._active_points[1]][1]],
[self._point_dict[self._active_points[3]][0],
self._point_dict[self._active_points[3]][1]]]
start = 0
for fraction in fractions:
start += fraction
if start != 1:
self._opt_frames['opt_frame'+str(count)] = [[self._point_dict[self._active_points[0]][0] +
round(self.opt_get_length()*start,5),
self._point_dict[self._active_points[0]][1]],
[self._point_dict[self._active_points[2]][0] +
round(self.opt_get_length() * start,5),
self._point_dict[self._active_points[2]][1]]]
count+=1
distances = {'start_dist': dist(self._opt_frames['opt_frame_start'][0], self._opt_frames['opt_frame_start'][1]),
'stop_dist': dist(self._opt_frames['opt_frame_stop'][0], self._opt_frames['opt_frame_stop'][1])}
return self._opt_frames, distances
def opt_create_main_structure(self,frames,start1,stop1,start2,stop2):
''' This creates line definitions for the new structure objects.
The script searches along the lines to find the frames.'''
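# The lower and the upper line are each walked in 100 steps; whenever a frame end point is passed
# (within 0.1 m) the line is split there, so 'opt_struc1..n' become the plate field line
# definitions between consecutive frames.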
line1_coord = self._point_dict[start1],self._point_dict[stop1]
line2_coord = self._point_dict[start2],self._point_dict[stop2]
structure = {}
p1_low,p1_high = list(line1_coord[0]),list(line2_coord[0])
p2_low,p2_high = list(line1_coord[1]),list(line2_coord[1])
vector_low,vector_high = [p2_low[0]-p1_low[0],p2_low[1]-p1_low[1]],[p2_high[0]-p1_high[0],p2_high[1]-p1_high[1]]
# Starting search on the lower or inner line
count = 1
tmp_struc = [p1_low] # starting point defined.
found = None
for frame, coords in frames.items():
current = list(p1_low)
if frame!='opt_frame_start' and frame!='opt_frame_stop':
for jump in range(100):
current[0] += vector_low[0] / 100
current[1] += vector_low[1] / 100
if dist(current,coords[0]) < 0.1 and frame != found:
found = frame
tmp_struc.append(coords[0])
self._opt_structure['opt_struc'+str(count)] = tmp_struc # adding found line
tmp_struc = [coords[0]]
count += 1
tmp_struc.append(p2_low)
self._opt_structure['opt_struc'+str(count)] = tmp_struc # adding found line (end)
count += 1
# Starting search of upper or outer line.
tmp_struc = [p1_high] # starting point defined.
found = None
for frame, coords in frames.items():
current = list(p1_high)
if frame!='opt_frame_start' and frame!='opt_frame_stop':
for jump in range(100):
current[0] += vector_high[0] / 100
current[1] += vector_high[1] / 100
if dist(current,coords[1]) < 0.1 and frame != found:
found = frame
tmp_struc.append(coords[1])
self._opt_structure['opt_struc'+str(count)] = tmp_struc # adding found line
tmp_struc = [coords[1]]
count += 1
tmp_struc.append(p2_high)
self._opt_structure['opt_struc'+str(count)] = tmp_struc # adding found line (end)
return self._opt_structure
def get_running_time(self):
'''
Estimate the running time of the algorithm.
:return:
'''
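# For the exhaustive algorithms the estimate is:
#   combinations = prod(max((upper_i - lower_i) / delta_i, 1)) over the six section variables
#   time ~ combinations * running_time_per_item * number_of_active_lines
# For the random algorithms the number of trials replaces the combination count.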
if self._new_algorithm.get() in ['anysmart', 'anydetail']:
try:
number_of_combinations = \
max((self._new_spacing_upper.get() - self._new_spacing_lower.get()) / self._new_delta_spacing.get(),
1) * \
max((self._new_pl_thk_upper.get() - self._new_pl_thk_lower.get()) / self._new_delta_pl_thk.get(),
1) * \
max((self._new_web_h_upper.get() - self._new_web_h_lower.get()) / self._new_delta_web_h.get(), 1) * \
max((self._new_web_thk_upper.get() - self._new_web_thk_lower.get()) / self._new_delta_web_thk.get(),
1) * \
max((self._new_fl_w_upper.get() - self._new_fl_w_lower.get()) / self._new_delta_fl_w.get(), 1) * \
max((self._new_fl_thk_upper.get() - self._new_fl_thk_lower.get()) / self._new_delta_fl_thk.get(), 1)
return int(number_of_combinations * self.running_time_per_item) * len(self._active_lines)
except TclError:
return 0
else:
try:
return int(self._new_algorithm_random_trials.get() * self.running_time_per_item) * len(
self._active_lines)
except TclError:
return 0
def get_deltas(self):
'''
Return a numpy array of the deltas.
:return:
'''
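# Order of the deltas (converted from mm to m):
# [spacing, plate thk, web height, web thk, flange width, flange thk]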
return np.array([float(self._ent_delta_spacing.get()) / 1000, float(self._new_delta_pl_thk.get()) / 1000,
float(self._new_delta_web_h.get()) / 1000, float(self._new_delta_web_thk.get()) / 1000,
float(self._new_delta_fl_w.get()) / 1000, float(self._new_delta_fl_thk.get()) / 1000])
def update_running_time(self, *args):
'''
Callback on input changes; keeps the buckling check options consistent (the running-time label update is commented out).
:return:
'''
# try:
# self._runnig_time_label.config(text=str(self.get_running_time()))
# except ZeroDivisionError:
# pass # _tkinter.TclError: pass
if self._new_check_buckling_ml_cl.get():
self._new_check_buckling.set(False)
self._new_check_local_buckling.set(False)
def get_upper_bounds(self):
'''
Return a numpy array of upper bounds.
:return:
'''
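# The first six entries are the section bounds converted from mm to m; the trailing 6 and 10 are
# fixed bounds for the two extra geometric variables passed to the optimizer (presumably span and
# girder length - an assumption, they are simply hard-coded here).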
return np.array([self._new_spacing_upper.get() / 1000, self._new_pl_thk_upper.get() / 1000,
self._new_web_h_upper.get() / 1000, self._new_web_thk_upper.get() / 1000,
self._new_fl_w_upper.get() / 1000, self._new_fl_thk_upper.get() / 1000,
6, 10])
def get_lower_bounds(self):
'''
Return a numpy array of lower bounds.
:return:
'''
return np.array([self._new_spacing_lower.get() / 1000, self._new_pl_thk_lower.get() / 1000,
self._new_web_h_lower.get() / 1000, self._new_web_thk_lower.get() / 1000,
self._new_fl_w_lower.get() / 1000, self._new_fl_thk_lower.get() / 1000,
1, 10])
def checkered(self, line_distance):
'''
Creates a grid in the properties canvas.
:param line_distance:
:return:
'''
# vertical lines at an interval of "line_distance" pixel
for x in range(line_distance, self._prop_canvas_dim[0], line_distance):
self._canvas_opt.create_line(x, 0, x, self._prop_canvas_dim[1], fill="grey", stipple='gray50')
# horizontal lines at an interval of "line_distance" pixel
for y in range(line_distance, self._prop_canvas_dim[1], line_distance):
self._canvas_opt.create_line(0, y, self._prop_canvas_dim[0], y, fill="grey", stipple='gray50')
def draw_properties(self, init_obj=None, opt_obj=None, line=None):
'''
Drawing properties in the canvas.
:return:
'''
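# Blue cross-section = initial design, red = optimized result; the geometry is scaled by
# self._draw_scale and the weight per girder width is computed with op.calc_weight.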
ctr_x = self._prop_canvas_dim[0] / 2
ctr_y = self._prop_canvas_dim[1] / 2 + 200
opt_color, opt_stippe = 'red', 'gray12'
m = self._draw_scale
if init_obj != None:
self._canvas_opt.delete('all')
self.checkered(10)
init_color, init_stipple = 'blue', 'gray12'
self._canvas_opt.create_rectangle(0, 0, self._prop_canvas_dim[0] + 10, 80, fill='white')
self._canvas_opt.create_line(10, 10, 30, 10, fill=init_color, width=5)
self._canvas_opt.create_text(270, 10, text='Initial - Pl.: ' + str(init_obj.get_s() * 1000) + 'x' + str(
init_obj.get_pl_thk() * 1000) +
' Stf.: ' + str(init_obj.get_web_h() * 1000) + 'x' + str(
init_obj.get_web_thk() * 1000) + '+' +
str(init_obj.get_fl_w() * 1000) + 'x' + str(
init_obj.get_fl_thk() * 1000),
font='Verdana 8',
fill=init_color)
self._canvas_opt.create_text(120, 30, text='Weight (per Lg width): ' +
str(int(op.calc_weight([init_obj.get_s(),
init_obj.get_pl_thk(),
init_obj.get_web_h(),
init_obj.get_web_thk(),
init_obj.get_fl_w(),
init_obj.get_fl_thk(),
init_obj.get_span(),
init_obj.get_lg()]))),
font='Verdana 8', fill=init_color)
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.get_s() / 2, ctr_y, ctr_x + m * init_obj.get_s() / 2,
ctr_y - m * init_obj.get_pl_thk(), fill=init_color, stipple=init_stipple)
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.get_web_thk() / 2, ctr_y - m * init_obj.get_pl_thk(),
ctr_x + m * init_obj.get_web_thk() / 2,
ctr_y - m * (init_obj.get_web_h() + init_obj.get_pl_thk())
, fill=init_color, stipple=init_stipple)
if init_obj.get_stiffener_type() not in ['L', 'L-bulb']:
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.get_fl_w() / 2,
ctr_y - m * (init_obj.get_pl_thk() + init_obj.get_web_h()),
ctr_x + m * init_obj.get_fl_w() / 2,
ctr_y - m * (
init_obj.get_pl_thk() + init_obj.get_web_h() + init_obj.get_fl_thk()),
fill=init_color, stipple=init_stipple)
else:
self._canvas_opt.create_rectangle(ctr_x - m * init_obj.get_web_thk() / 2,
ctr_y - m * (init_obj.get_pl_thk() + init_obj.get_web_h()),
ctr_x + m * init_obj.get_fl_w(),
ctr_y - m * (
init_obj.get_pl_thk() + init_obj.get_web_h() + init_obj.get_fl_thk()),
fill=init_color, stipple=init_stipple)
if opt_obj != None:
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.get_s() / 2, ctr_y,
ctr_x + m * opt_obj.get_s() / 2,
ctr_y - m * opt_obj.get_pl_thk(), fill=opt_color,
stipple=opt_stippe)
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.get_web_thk() / 2, ctr_y -
m * opt_obj.get_pl_thk(),
ctr_x + m * opt_obj.get_web_thk() / 2,
ctr_y - m * (
opt_obj.get_web_h() + opt_obj.get_pl_thk())
, fill=opt_color, stipple=opt_stippe)
if init_obj.get_stiffener_type() not in ['L', 'L-bulb']:
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.get_fl_w() / 2, ctr_y
- m * (
opt_obj.get_pl_thk() + opt_obj.get_web_h()),
ctr_x + m * opt_obj.get_fl_w() / 2, ctr_y -
m * (
opt_obj.get_pl_thk() + opt_obj.get_web_h() +
opt_obj.get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
else:
self._canvas_opt.create_rectangle(ctr_x - m * opt_obj.get_web_thk() / 2, ctr_y
- m * (
opt_obj.get_pl_thk() + opt_obj.get_web_h()),
ctr_x + m * opt_obj.get_fl_w(), ctr_y -
m * (
opt_obj.get_pl_thk() + opt_obj.get_web_h() +
opt_obj.get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
self._canvas_opt.create_line(10, 50, 30, 50, fill=opt_color, width=5)
self._canvas_opt.create_text(270, 50,
text='Optimized - Pl.: ' + str(round(opt_obj.get_s() * 1000,1)) + 'x' +
str(round(opt_obj.get_pl_thk() * 1000,1)) + ' Stf.: '
+ str(round(opt_obj.get_web_h() * 1000,1)) +
'x' + str(round(opt_obj.get_web_thk() * 1000,1)) + '+' +
str(round(opt_obj.get_fl_w() * 1000,1)) +
'x' + str(round(opt_obj.get_fl_thk() * 1000,1)),
font='Verdana 8', fill=opt_color)
self._canvas_opt.create_text(120, 70, text='Weight (per Lg width): '
+ str(int(op.calc_weight([opt_obj.get_s(),
opt_obj.get_pl_thk(),
opt_obj.get_web_h(),
opt_obj.get_web_thk(),
opt_obj.get_fl_w(),
opt_obj.get_fl_thk(),
opt_obj.get_span(),
opt_obj.get_lg()]))),
font='Verdana 8', fill=opt_color)
else:
self._canvas_opt.create_text(150, 60, text='No optimized solution found.')
if line != None:
if __name__ == '__main__':
lateral_press = 0.2 # for testing
else:
lateral_press = self.app.get_highest_pressure(line)['normal'] /1e6
self._canvas_opt.create_text(250, self._prop_canvas_dim[1] - 10,
text='Lateral pressure: ' + str(lateral_press) + ' kPa',
font='Verdana 10 bold', fill='red')
def draw_select_canvas(self, opt_results = None, save_file = None):
'''
Making the lines canvas.
:return:
'''
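# Two modes: with opt_results=None the selection view is drawn (axes, points, lines, frames and
# the four START/STOP markers); with results given, a textual summary per fraction is written to
# the canvas and, if provided, to the open save_file.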
self._canvas_select.delete('all')
text_type = 'Verdana 8'
if opt_results is None:
# stippled lines and text.
self._canvas_select.create_line(self._canvas_draw_origo[0], 0, self._canvas_draw_origo[0], self._select_canvas_dim[1],
stipple='gray50')
self._canvas_select.create_line(0, self._canvas_draw_origo[1], self._select_canvas_dim[0], self._canvas_draw_origo[1],
stipple='gray50')
self._canvas_select.create_text(self._canvas_draw_origo[0] - 30,
self._canvas_draw_origo[1] + 20, text='(0,0)',
font='Text 10')
self._canvas_select.create_text([700, 50],
text='How to:\n'
'For a double bottom structure: \n'
'Click start point 1 -> click end point 1 (for example bottom plate)\n'
'Click start point 2 -> click end point 2 (for example inner bottom)\n'
'Run optimization! Wait for the results...... wait.... wait....\n',
font='Verdana 8 bold',
fill='red')
# drawing the line dictionary.
if len(self._line_dict) != 0:
for line, value in self._line_dict.items():
color = 'black'
coord1 = self.get_point_canvas_coord('point' + str(value[0]))
coord2 = self.get_point_canvas_coord('point' + str(value[1]))
vector = [coord2[0] - coord1[0], coord2[1] - coord1[1]]
# drawing a bold line if it is selected
if self._line_to_struc[line][0].Plate.get_structure_type() not in ('GENERAL_INTERNAL_NONWT','FRAME'):
if line in self._active_lines:
self._canvas_select.create_line(coord1, coord2, width=6, fill=color,stipple='gray50')
self._canvas_select.create_text(coord1[0] + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 + 10,
text='Line ' + str(get_num(line)), font='Verdana 10 bold',
fill='red')
else:
self._canvas_select.create_line(coord1, coord2, width=3, fill=color,stipple='gray25')
self._canvas_select.create_text(coord1[0] - 20 + vector[0] / 2 + 5, coord1[1] + vector[1] / 2 +
10,text='line' + str(get_num(line)),font="Text 8", fill='black')
if len(self._opt_frames) != 0:
for key,value in self._opt_frames.items():
coord1 = self.get_canvas_coord(value[0])
coord2 = self.get_canvas_coord(value[1])
vector = [coord2[0] - coord1[0], coord2[1] - coord1[1]]
self._canvas_select.create_line(coord1, coord2, width=3, fill='SkyBlue1')
else:
pass
if len(self._active_points)>1:
color = 'blue'
coord1 = self.get_point_canvas_coord(self._active_points[0])
coord2 = self.get_point_canvas_coord(self._active_points[1])
vector = [coord2[0] - coord1[0], coord2[1] - coord1[1]]
# drawing a bold line if it is selected
self._canvas_select.create_line(coord1, coord2, width=6, fill=color)
if len(self._active_points) > 3:
coord1 = self.get_point_canvas_coord(self._active_points[2])
coord2 = self.get_point_canvas_coord(self._active_points[3])
vector = [coord2[0] - coord1[0], coord2[1] - coord1[1]]
self._canvas_select.create_line(coord1, coord2, width=6, fill=color)
# self._canvas_select.create_polygon(points, outline='#f11',
# fill='#1f1', width=2)
# drawing the point dictionary
for key,value in self._point_dict.items():
pt_size = 6
if key in self._active_points:
self._canvas_select.create_oval(self.get_point_canvas_coord(key)[0] - pt_size + 2,
self.get_point_canvas_coord(key)[1] - pt_size + 2,
self.get_point_canvas_coord(key)[0] + pt_size + 2,
self.get_point_canvas_coord(key)[1] + pt_size + 2, fill='blue')
if self._active_points.index(key) == 0:
self._canvas_select.create_text(self.get_point_canvas_coord(key)[0] - 5,
self.get_point_canvas_coord(key)[1] - 14, text='START 1',
font=text_type, fill = 'blue')
elif self._active_points.index(key) == 1:
self._canvas_select.create_text(self.get_point_canvas_coord(key)[0] - 5,
self.get_point_canvas_coord(key)[1] - 14,
text='STOP 1',font=text_type, fill='blue')
elif self._active_points.index(key) == 2:
self._canvas_select.create_text(self.get_point_canvas_coord(key)[0] - 5,
self.get_point_canvas_coord(key)[1] - 14,
text='START 2',font=text_type, fill='blue')
elif self._active_points.index(key) == 3:
self._canvas_select.create_text(self.get_point_canvas_coord(key)[0] - 5,
self.get_point_canvas_coord(key)[1] - 14,
text='STOP 2',font=text_type, fill='blue')
else:
pass
else:
self._canvas_select.create_oval(self.get_point_canvas_coord(key)[0] - pt_size,
self.get_point_canvas_coord(key)[1] - pt_size,
self.get_point_canvas_coord(key)[0] + pt_size,
self.get_point_canvas_coord(key)[1] + pt_size, fill='red')
self._canvas_select.create_text(self.get_point_canvas_coord(key)[0] - 5,
self.get_point_canvas_coord(key)[1] - 14, text='pt.'+str(get_num(key)),
font='Verdana 8', fill='blue')
else:
self._canvas_select.create_text([20, 20], text='Results are presented here. '
'All results may not fit the screen. '
'All results are included in your saved result file.',
font='Verdana 12 bold',
fill='red', anchor = 'w')
delta, start_x, y_loc = 20, 10, 40
for key, values in opt_results.items():
# if y_loc > 700:
# start_x = 400
# y_loc = 40
y_loc = y_loc + delta
check_ok = [val[2] is True for val in values[1]]
if save_file is not None:
save_file.write('\n')
save_file.write('--------------------------------------------------------------------------' + '\n')
save_file.write('Plate fields: '+str(len(values[2]['objects']))+ ' Frames: '+
str(len(values[2]['frames'])) + '\n')
self._canvas_select.create_text([start_x + delta, y_loc],
text=str(len(check_ok))+' panels with weight '+ str(round(values[0],1)),
anchor='w', font=text_type)
y_loc += delta
item_count, endstring = 0, ''
for data_idx, data in enumerate(values[1]):
for idx, stuc_info in enumerate(data):
if type(stuc_info) == calc_structure.AllStructure:
if y_loc > 700:
y_loc = 120
start_x += 350
if item_count == 0:
endstring = ' START 1'+' OK!\n' if values[1][data_idx][3] else ' START 1'+' NOT OK!\n'
elif item_count > 0 and item_count < len(values[1]) / 2-1 and len(values[1]) != 4:
endstring = ' -------'+' OK!\n' if values[1][data_idx][3] else ' -------'+' NOT OK!\n'
elif item_count == len(values[1])/2-1:
endstring = ' -END 1-'+' OK!\n' if values[1][data_idx][3] else ' -END 1-'+' NOT OK!\n'
elif item_count == len(values[1])/2:
endstring = ' START 2'+' OK!\n' if values[1][data_idx][3] else ' START 2'+' NOT OK!\n'
elif item_count > len(values[1])/2 and item_count < len(values[1])-1:
endstring = ' -------'+' OK!\n' if values[1][data_idx][3] else ' -------'+' NOT OK!\n'
elif item_count == len(values[1])-1:
endstring = ' -END 2-'+' OK!\n' if values[1][data_idx][3] else ' -END 2-'+' NOT OK!\n'
self._canvas_select.create_text([start_x + delta, y_loc],
text=stuc_info.get_one_line_string_mixed()+endstring,
anchor='w', font=text_type)
y_loc += 15
if save_file is not None:
save_file.write(stuc_info.get_one_line_string_mixed()+' ' +
stuc_info.get_extended_string_mixed() +
' | ' + stuc_info.Plate.get_report_stresses() +
endstring)
item_count += 1
if save_file is not None:
save_file.write('Weight details for this solution:\n')
save_file.write('Weight of main structure: ' + str([str(round(val, 1))
for val in values[2]['objects']]) + '\n')
save_file.write('Weight of frames: ' + str([str(round(val, 1))
for val in values[2]['frames']]) + '\n')
save_file.write('Scales used on frames: ' +
str([str(round(val, 3)) for val in values[2]['scales']]) + '\n')
save_file.write('----------------------------------------------------------------------------'+'\n')
if save_file is not None:
save_file.write('\n ------------- END ---------------')
save_file.close()
def draw_result_text(self, geo_opt_obj, save_to_file = None):
''' Textual version of the results. '''
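# Weight index = total weight of a variation divided by the heaviest (finite) variation, so 1.0
# marks the heaviest alternative; the returned xplot/yplot lists feed the matplotlib plot of
# field length vs. weight index.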
self._canvas_opt.delete('all')
start_x = 20
delta = 25
start_y = 60
y_loc = delta + start_y
xplot = list()
yplot = list()
self._canvas_opt.create_text([start_x, 40],
text='Results are listed below. Weight index is tot_weight / max_weight.\n'
'max_weight is the highest total weight of the checked variations.\n'
'Weight index of 1 is the heaviest calculated variation.',
font='Verdana 10', fill='Blue', anchor='w')
self._canvas_opt.create_text([start_x, y_loc],
text='| Plate fields | Fields length | Weight index | All OK? |',
font='Verdana 10 bold', fill='red', anchor = 'w')
y_loc += delta / 2
self._canvas_opt.create_text([start_x, y_loc],
text='************************************************', anchor='w',
font='Verdana 10 bold')
text_type = 'Verdana 10 bold'
weights = [self._geo_results[key][0] for key in self._geo_results.keys()]
max_weight = 0
for weight in weights:
if weight != float('inf'):
max_weight = weight if weight > max_weight else max_weight
if save_to_file is not None:
save_file = open(save_to_file, 'w')
save_file.write('| Plate fields | Fields length | Weight index | All OK? |\n')
save_file.write('*********************************************************\n')
for key, value in self._geo_results.items():
y_loc = y_loc + delta
check_ok = [val[2] is True for val in value[1]]
self._canvas_opt.create_text([start_x + 20, y_loc ], text=str(len(check_ok)),
anchor='w', font=text_type)
self._canvas_opt.create_text([start_x + 120, y_loc ], text=str('No results\n' if
self._geo_results[key][1][0][0] is None else
round(self._geo_results[key][1][0][0].
Plate.get_span(),4)),
anchor='w', font=text_type)
self._canvas_opt.create_text([start_x + 220, y_loc ],
text=str(round(self._geo_results[key][0] / max_weight, 3))
if max_weight != 0 else '',
anchor='w', font=text_type)
self._canvas_opt.create_text([start_x + 330, y_loc ], text=str(all(check_ok)),
anchor='w', font=text_type)
if save_to_file is not None:
    # Mirror the on-canvas columns: | Plate fields | Fields length | Weight index | All OK? |
    span_str = 'No results' if self._geo_results[key][1][0][0] is None else \
        str(round(self._geo_results[key][1][0][0].Plate.get_span(), 4))
    weight_str = str(round(self._geo_results[key][0] / max_weight, 3)) if max_weight != 0 else ''
    save_file.write(str(len(check_ok)) + ' ' + span_str + ' ' + weight_str + ' ' +
                    str(all(check_ok)) + '\n')
if self._geo_results[key][1][0][0] is not None:
xplot.append(round(self._geo_results[key][1][0][0].Plate.get_span(),4))
yplot.append(round(self._geo_results[key][0] / max_weight, 4))
if save_to_file is not None:
return save_file, xplot, yplot
else:
return None, xplot, yplot
def algorithm_info(self):
''' When button is clicked, info is displayed.'''
messagebox.showinfo(title='Algorithm information',
message='The algorithms currently included is:\n'
'ANYSMART: \n'
' Calculates all alternatives using upper and lower bounds.\n'
' The step used inside the bounds is defined in deltas.\n\n'
'RANDOM: \n'
' Uses the same bounds and deltas as in ANYSMART.\n'
' Number of combinations calculated is defined in "trials",\n'
' which selects within the bounds and deltas defined.\n\n'
'RANDOM_NO_BOUNDS:\n'
' Same as RANDOM, but does not use the defined deltas.\n'
' The deltas are set to 1 mm for all dimensions/thicknesses.\n\n'
'ANYDETAIL:\n'
' Same as for ANYSMART, but will take some more time and\n'
' provide a chart of weight development during execution.\n\n'
'PSO - Particle Swarm Search:\n'
' The information can be found on \n'
' http://pythonhosted.org/pyswarm/ \n'
' For further information google it!\n'
' Parameters:\n'
' swarmsize : The number of particles in the swarm (Default: 100)\n'
' omega : Particle velocity scaling factor (Default: 0.5)\n'
' phip : Scaling factor to search away from the particle’s \n'
' best known position (Default: 0.5)\n'
' phig : Scaling factor to search away from the swarm’s best \n'
' known position (Default: 0.5)\n'
' maxiter : The maximum number of iterations for the swarm \n'
' to search (Default: 100)\n'
' minstep : The minimum stepsize of swarm’s best position \n'
' before the search terminates (Default: 1e-8)\n'
' minfunc : The minimum change of swarm’s best objective value\n'
' before the search terminates (Default: 1e-8)\n\n'
'\n'
'All algorithms calculate local scantling and buckling requirements')
def slider_used(self, event):
'''
Action when slider is activated.
:return:
'''
self._canvas_scale = self.slider.get()
self.draw_canvas()
def on_closing(self):
'''
Action when closing the window without saving.
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
mess = tk.messagebox.showwarning('Closed without saving', 'Closing will not save loads you have created',
type='okcancel')
if mess == 'ok':
self._frame.grab_release()
self._frame.destroy()
self.app.on_aborted_load_window()
def get_point_canvas_coord(self, point_no):
'''
Returning the canvas coordinates of the point. This value will change with slider.
:param point_no:
:return:
'''
point_coord_x = self._canvas_draw_origo[0] + self._point_dict[point_no][0]* self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - self._point_dict[point_no][1]* self._canvas_scale
return [point_coord_x, point_coord_y]
def get_canvas_coord(self, coord):
'''
Returning the canvas coordinates of the point. This value will change with slider.
:param point_no:
:return:
'''
point_coord_x = self._canvas_draw_origo[0] + coord[0] * self._canvas_scale
point_coord_y = self._canvas_draw_origo[1] - coord[1] * self._canvas_scale
return [point_coord_x, point_coord_y]
def controls(self):
'''
Specifying the controls to be used.
:return:
'''
self._canvas_select.bind('<Button-1>', self.button_1_click)
self._canvas_select.bind('<Button-2>', self.button_2_click)
self._canvas_select.bind('<Button-3>', self.button_3_click)
self._frame.bind('<Shift_L>', self.shift_pressed)
self._frame.bind('<Shift_R>', self.shift_pressed)
self._frame.bind('<Control_L>', self.ctrl_pressed)
self._frame.bind('<Control_R>', self.ctrl_pressed)
self._frame.bind("<MouseWheel>", self.mouse_scroll)
self._frame.bind("<B2-Motion>", self.button_2_click_and_drag)
def shift_pressed(self, event=None):
'''
Event is executed when shift key pressed.
:return:
'''
self._add_to_lines = True
def ctrl_pressed(self, event=None):
'''
Event when control is pressed.
:param event:
:return:
'''
self._add_to_lines = False
def button_1_click(self, event):
'''
Called when the left mouse button (Button-1) is clicked; selects start/stop points.
'''
self._previous_drag_mouse = [event.x, event.y]
# if type(self._geo_results) is not list():
# if self._geo_results is not None:
# return
# else:
# if self._geo_results[0] is not None:
# return
click_x = self._canvas_select.winfo_pointerx() - self._canvas_select.winfo_rootx()
click_y = self._canvas_select.winfo_pointery() - self._canvas_select.winfo_rooty()
self._point_is_active = False
margin = 10
self._active_point = ''
for point, coords in self._point_dict.items():
point_coord = self.get_point_canvas_coord(point)
if point_coord[0]-margin < click_x < point_coord[0]+margin and\
point_coord[1]-margin < click_y < point_coord[1]+margin:
self._active_point = point
self._point_is_active = True
if len(self._active_points)<4:
self._active_points.append(self._active_point)
if len(self._active_points)==4:
self.opt_create_frames(self.opt_get_fractions())
self.draw_select_canvas()
def button_3_click(self, event):
'''
Event on right click (Button-3); clears the current selection.
:param event:
:return:
'''
self._previous_drag_mouse = [event.x, event.y]
self._active_lines = []
self._active_points = []
self.draw_select_canvas()
def button_2_click(self, event):
'''
Event on middle mouse button click (Button-2); shows properties for the clicked line.
:param event:
:return:
'''
self._previous_drag_mouse = [event.x, event.y]
if self._opt_resutls =={}:
return
click_x = self._canvas_select.winfo_pointerx() - self._canvas_select.winfo_rootx()
click_y = self._canvas_select.winfo_pointery() - self._canvas_select.winfo_rooty()
if len(self._line_dict) > 0:
for key, value in self._line_dict.items():
coord1x = self.get_point_canvas_coord('point' + str(value[0]))[0]
coord2x = self.get_point_canvas_coord('point' + str(value[1]))[0]
coord1y = self.get_point_canvas_coord('point' + str(value[0]))[1]
coord2y = self.get_point_canvas_coord('point' + str(value[1]))[1]
vector = [coord2x - coord1x, coord2y - coord1y]
click_x_range = [ix for ix in range(click_x - 10, click_x + 10)]
click_y_range = [iy for iy in range(click_y - 10, click_y + 10)]
distance = int(dist([coord1x, coord1y], [coord2x, coord2y]))
# checking along the line if the click is within +/- 10 pixels of the line
for dist_mult in range(1, distance - 1):
dist_mult = dist_mult / distance
x_check = int(coord1x) + int(round(vector[0] * dist_mult, 0))
y_check = int(coord1y) + int(round(vector[1] * dist_mult, 0))
if x_check in click_x_range and y_check in click_y_range:
self._canvas_select.delete('all')
self._active_lines = []
self._active_lines.append(key)
if key in self._opt_resutls.keys() and self._opt_resutls[key] != None:
self.draw_properties(init_obj=self._line_to_struc[key][0],
opt_obj=self._opt_resutls[key][0],
line=key)
else:
self.draw_properties(init_obj=self._line_to_struc[key][0], line=key)
break
self.draw_select_canvas()
self.draw_select_canvas()
self.update_running_time()
#############################
self.opt_create_main_structure(self.opt_create_frames(self.opt_get_fractions())[0],self._active_points[0],
self._active_points[1],self._active_points[2],self._active_points[3])
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
try:
to_return = {}
for line in self._active_lines:
to_return[line] = self._opt_resutls[line]
self.app.on_close_opt_multiple_window(to_return)
messagebox.showinfo(title='Return info', message='Returning: ' + str(self._active_lines))
except IndexError:
messagebox.showinfo(title='Nothing to return', message='No results to return.')
return
self._frame.destroy()
def toggle(self, found_files = None, obj = None, iterating = False, given_path: str = None):
'''
On off button.
:param found_files:
:param obj:
:return:
'''
# if iterating:
# if found_files is not None:
# predefined_structure = hlp.helper_read_section_file(files=found_files, obj=obj)
# else:
predefined_structure = None
if self._toggle_btn.config('relief')[-1] == 'sunken':
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg = 'salmon')
self._ent_spacing_upper.config(bg = 'white')
self._ent_spacing_lower.config(bg = 'white')
self._ent_delta_spacing.config(bg = 'white')
self._filez = None
else:
self._toggle_btn.config(relief="sunken")
self._toggle_btn.config(bg='lightgreen')
self._ent_spacing_upper.config(bg = 'lightgreen')
self._ent_spacing_lower.config(bg = 'lightgreen')
self._ent_delta_spacing.config(bg = 'lightgreen')
self._ent_pl_thk_upper.config(bg = 'lightgreen')
self._ent_pl_thk_lower.config(bg = 'lightgreen')
self._ent_delta_pl_thk.config(bg = 'lightgreen')
if given_path is None:
self._filez = list(askopenfilenames(parent=self._frame, title='Choose files to open',
initialdir=self._root_dir))
else:
self._filez = [given_path]
if self._filez == []:
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg='salmon')
self._ent_spacing_upper.config(bg='white')
self._ent_spacing_lower.config(bg='white')
self._ent_delta_spacing.config(bg='white')
self._ent_pl_thk_upper.config(bg='white')
self._ent_pl_thk_lower.config(bg='white')
self._ent_delta_pl_thk.config(bg='white')
return found_files, predefined_structure
def open_example_file(self):
import os
if os.path.isfile('sections.csv'):
os.startfile('sections.csv')
else:
os.startfile(self._root_dir + '/' + 'sections.csv')
def plot_results(self):
'Plotting a selected panel'
if self._geo_results is not None \
and type(self._new_option_fraction.get()) == int and type(self._new_option_panel.get()) == int:
op.plot_optimization_results(self._geo_results[int(self._new_option_fraction.get()/2)][1]
[self._new_option_panel.get()])
def get_plate_field_options(self, event):
if self._geo_results is not None:
self._ent_option_field.destroy()
to_add = tuple([val for val in range(len(self._geo_results[int(self._new_option_fraction.get()/2)][1]))])
self._ent_option_field = tk.OptionMenu(self._frame, self._new_option_panel, *to_add)
self._ent_option_field.place(x=self._options_panels_place[0], y=self._options_panels_place[1])
def mouse_scroll(self,event):
self._canvas_scale += event.delta/50
self._canvas_scale = 0 if self._canvas_scale < 0 else self._canvas_scale
self.draw_select_canvas()
def button_2_click_and_drag(self,event):
self._canvas_draw_origo = (self._canvas_draw_origo[0]-(self._previous_drag_mouse[0]-event.x),
self._canvas_draw_origo[1]-(self._previous_drag_mouse[1]-event.y))
self._previous_drag_mouse = (event.x,event.y)
self.draw_select_canvas()
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateOptGeoWindow(master=root)
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/optimize_geometry.py | optimize_geometry.py |
import tkinter as tk
from _tkinter import TclError
import numpy as np
import time, os, datetime
from tkinter import messagebox
from tkinter.filedialog import askopenfilenames
from multiprocessing import cpu_count
try:
from any_files.calc_structure import CalcScantlings, AllStructure
import any_files.example_data as test
import any_files.example_data as ex
import any_files.helper as hlp
import any_files.optimize as op
except ModuleNotFoundError:
from ANYstructure.any_files.calc_structure import CalcScantlings, AllStructure
import ANYstructure.any_files.example_data as test
import ANYstructure.any_files.example_data as ex
import ANYstructure.any_files.helper as hlp
import ANYstructure.any_files.optimize as op
class CreateOptimizeWindow():
'''
This class initiates the single optimization window.
'''
def __init__(self,master,app=None):
super(CreateOptimizeWindow,self).__init__()
if __name__ == '__main__':
import pickle
Plate = CalcScantlings(ex.obj_dict)
Stiffener = None#CalcScantlings(ex.obj_dict)
Girder = None # CalcScantlings(ex.obj_dict_heavy)
self._initial_calc_obj = AllStructure(Plate=Plate, Stiffener=Stiffener, Girder=Girder,
main_dict=ex.prescriptive_main_dict)
#self._initial_calc_obj = test.get_structure_calc_object(heavy=True)
self._lateral_pressure = 0.2
self._fatigue_object = test.get_fatigue_object()
self._fatigue_pressure = test.get_fatigue_pressures()
self._slamming_pressure = test.get_slamming_pressure()
image_dir = os.path.dirname(__file__)+'\\images\\'
self._PULS_object = None
self._puls_acceptance = 0.87
self._initial_calc_obj.lat_press = self._lateral_pressure/1000
self._ML_buckling = dict() # Buckling machine learning algorithm
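# Maps each predictor/scaler name to a pickled scikit-learn object (hence the MLPClassifier /
# StandardScaler imports before unpickling below); entries stay None if the pickle file is missing.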
self._root_dir = '/\\'
for name, file_base in zip(['cl SP buc int predictor', 'cl SP buc int scaler',
'cl SP ult int predictor', 'cl SP ult int scaler',
'cl SP buc GLGT predictor', 'cl SP buc GLGT scaler',
'cl SP ult GLGT predictor', 'cl SP ult GLGT scaler',
'cl UP buc int predictor', 'cl UP buc int scaler',
'cl UP ult int predictor', 'cl UP ult int scaler',
'cl UP buc GLGT predictor', 'cl UP buc GLGT scaler',
'cl UP ult GLGT predictor', 'cl UP ult GLGT scaler'
],
["ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_SP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_1_UP",
"ml_files\\CL_output_cl_buc_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_buc_scaler_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_predictor_In-plane_support_cl_2,_3_UP",
"ml_files\\CL_output_cl_ult_scaler_In-plane_support_cl_2,_3_UP",
"CL_CSR-Tank_req_cl_predictor",
"CL_CSR-Tank_req_cl_UP_scaler",
"CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_predictor",
"CL_CSR_plate_cl,_CSR_web_cl,_CSR_web_flange_cl,_CSR_flange_cl_SP_scaler"]):
self._ML_buckling[name] = None
if os.path.isfile(file_base + '.pickle'):
file = open(file_base + '.pickle', 'rb')
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler
self._ML_buckling[name] = pickle.load(file)
file.close()
self._ML_classes = {0: 'N/A',
1: 'A negative utilisation factor is found.',
2: 'At least one of the in-plane loads must be non-zero.',
3: 'Division by zero',
4: 'Overflow',
5: 'The aspect ratio exceeds the PULS code limit',
6: 'The global slenderness exceeds 4. Please reduce stiffener span or increase stiffener height.',
7: 'The applied pressure is too high for this plate field.', 8: 'web-flange-ratio',
9: 'UF below or equal 0.87', 10: 'UF between 0.87 and 1.0', 11: 'UF above 1.0'}
else:
self.app = app
self._initial_calc_obj = app._line_to_struc[app._active_line][0]
self._fatigue_object = app._line_to_struc[app._active_line][2]
try:
self._fatigue_pressure = app.get_fatigue_pressures(app._active_line,
self._fatigue_object.get_accelerations())
except AttributeError:
self._fatigue_pressure = None
try:
self._lateral_pressure = self.app.get_highest_pressure(self.app._active_line)['normal'] / 1e6
except KeyError:
self._lateral_pressure = 0
try:
if self.app.get_highest_pressure(self.app._active_line)['slamming'] is None:
self._slamming_pressure = 0
else:
self._slamming_pressure = self.app.get_highest_pressure(self.app._active_line)['slamming']
except KeyError:
self._slamming_pressure = 0
image_dir = app._root_dir +'\\images\\'
self._root_dir = app._root_dir
self._PULS_object = app._PULS_results
self._puls_acceptance = self.app._new_puls_uf.get()
self._ML_buckling = app._ML_buckling
self._predefined_stiffener_iter = None
self._frame = master
self._frame.wm_title("Optimize structure")
self._frame.geometry('1400x900')
self._frame.grab_set()
self._opt_runned = False
self._opt_results = ()
self._opt_actual_running_time = tk.Label(self._frame,text='',font='Verdana 12 bold')
self._draw_scale = 500
self._canvas_dim = (500, 450)
self._canvas_opt = tk.Canvas(self._frame,width=self._canvas_dim[0], height=self._canvas_dim[1],
background='azure',relief = 'groove', borderwidth=2)
tk.Frame(self._frame,width=770,height=5, bg="grey", colormap="new").place(x=20,y=127)
tk.Frame(self._frame, width=770, height=5, bg="grey", colormap="new").place(x=20, y=167)
self._canvas_opt.place(x=10,y=300)
algorithms = ('anysmart','random','random_no_delta', 'anydetail')
tk.Label(self._frame,text='-- Structural optimizer --',font='Verdana 15 bold').place(x=10,y=10)
if self._initial_calc_obj.Stiffener is not None:
self._spacing = self._initial_calc_obj.Plate.get_s()
self._pl_thk = self._initial_calc_obj.Plate.get_pl_thk()
self._stf_web_h = self._initial_calc_obj.Stiffener.get_web_h()
self._stf_web_thk =self._initial_calc_obj.Stiffener.get_web_thk()
self._fl_w = self._initial_calc_obj.Stiffener.get_fl_w()
self._fl_thk =self._initial_calc_obj.Stiffener.get_fl_thk()
else:
self._spacing = self._initial_calc_obj.Plate.get_s()
self._pl_thk = self._initial_calc_obj.Plate.get_pl_thk()
self._stf_web_h = 0
self._stf_web_thk =0
self._fl_w = 0
self._fl_thk =0
# upper and lower bounds for optimization
#[0.6, 0.012, 0.3, 0.01, 0.1, 0.01]
self._new_spacing_upper = tk.DoubleVar()
self._new_spacing_lower = tk.DoubleVar()
self._new_pl_thk_upper = tk.DoubleVar()
self._new_pl_thk_lower = tk.DoubleVar()
self._new_web_h_upper = tk.DoubleVar()
self._new_web_h_lower = tk.DoubleVar()
self._new_web_thk_upper = tk.DoubleVar()
self._new_web_thk_lower = tk.DoubleVar()
self._new_fl_w_upper = tk.DoubleVar()
self._new_fl_w_lower = tk.DoubleVar()
self._new_fl_thk_upper = tk.DoubleVar()
self._new_fl_thk_lower = tk.DoubleVar()
self._new_span = tk.DoubleVar()
self._new_width_lg = tk.DoubleVar()
self._new_algorithm = tk.StringVar()
self._new_algorithm_random_trials = tk.IntVar()
self._new_swarm_size = tk.IntVar()
self._new_omega = tk.DoubleVar()
self._new_phip = tk.DoubleVar()
self._new_phig = tk.DoubleVar()
self._new_maxiter = tk.IntVar()
self._new_minstep = tk.DoubleVar()
self._new_minfunc = tk.DoubleVar()
self._new_slamming_pressure = tk.DoubleVar()
self._new_fatigue_int_press = tk.DoubleVar()
self._new_fatigue_ext_press = tk.DoubleVar()
ent_w = 10
self._ent_spacing_upper = tk.Entry(self._frame, textvariable = self._new_spacing_upper, width = ent_w)
self._ent_spacing_lower = tk.Entry(self._frame, textvariable=self._new_spacing_lower, width=ent_w)
self._ent_pl_thk_upper= tk.Entry(self._frame, textvariable=self._new_pl_thk_upper, width=ent_w)
self._ent_pl_thk_lower= tk.Entry(self._frame, textvariable=self._new_pl_thk_lower, width=ent_w)
self._ent_web_h_upper = tk.Entry(self._frame, textvariable=self._new_web_h_upper, width=ent_w)
self._ent_web_h_lower = tk.Entry(self._frame, textvariable=self._new_web_h_lower, width=ent_w)
self._ent_web_thk_upper = tk.Entry(self._frame, textvariable=self._new_web_thk_upper, width=ent_w)
self._ent_web_thk_lower = tk.Entry(self._frame, textvariable=self._new_web_thk_lower, width=ent_w)
self._ent_fl_w_upper = tk.Entry(self._frame, textvariable=self._new_fl_w_upper, width=ent_w)
self._ent_fl_w_lower = tk.Entry(self._frame, textvariable=self._new_fl_w_lower, width=ent_w)
self._ent_fl_thk_upper = tk.Entry(self._frame, textvariable=self._new_fl_thk_upper, width=ent_w)
self._ent_fl_thk_lower = tk.Entry(self._frame, textvariable=self._new_fl_thk_lower, width=ent_w)
self._ent_span = tk.Entry(self._frame, textvariable=self._new_span, width=ent_w)
self._ent_width_lg = tk.Entry(self._frame, textvariable=self._new_width_lg, width=ent_w)
self._ent_slamming_pressure = tk.Entry(self._frame, textvariable=self._new_slamming_pressure, width=ent_w)
#additional choices for the random and pso algorithm
self._ent_algorithm = tk.OptionMenu(self._frame,self._new_algorithm,command=self.selected_algorithm,*algorithms)
self._ent_random_trials = tk.Entry(self._frame,textvariable=self._new_algorithm_random_trials)
pso_width = 10
self._ent_swarm_size = tk.Entry(self._frame,textvariable=self._new_swarm_size, width = pso_width)
self._ent_omega = tk.Entry(self._frame,textvariable=self._new_omega, width = pso_width)
self._ent_phip = tk.Entry(self._frame,textvariable=self._new_phip, width = pso_width)
self._ent_phig = tk.Entry(self._frame,textvariable=self._new_phig, width = pso_width)
self._ent_maxiter = tk.Entry(self._frame,textvariable=self._new_maxiter, width = pso_width)
self._ent_minstep = tk.Entry(self._frame,textvariable=self._new_minstep, width = pso_width)
self._ent_minfunc = tk.Entry(self._frame,textvariable=self._new_minfunc, width = pso_width)
self._new_delta_spacing = tk.DoubleVar()
self._new_delta_pl_thk = tk.DoubleVar()
self._new_delta_web_h = tk.DoubleVar()
self._new_delta_web_thk = tk.DoubleVar()
self._new_delta_fl_w = tk.DoubleVar()
self._new_delta_fl_thk = tk.DoubleVar()
self._new_opt_spacing = tk.DoubleVar()
self._new_opt_pl_thk = tk.DoubleVar()
self._new_opt_web_h = tk.DoubleVar()
self._new_opt_web_thk = tk.DoubleVar()
self._new_opt_fl_w = tk.DoubleVar()
self._new_opt_fl_thk = tk.DoubleVar()
self._ent_delta_spacing = tk.Entry(self._frame, textvariable = self._new_delta_spacing, width = ent_w)
self._ent_delta_pl_thk = tk.Entry(self._frame, textvariable = self._new_delta_pl_thk, width = ent_w)
self._ent_delta_web_h = tk.Entry(self._frame, textvariable = self._new_delta_web_h, width = ent_w)
self._ent_delta_web_thk = tk.Entry(self._frame, textvariable = self._new_delta_web_thk, width = ent_w)
self._ent_delta_fl_w = tk.Entry(self._frame, textvariable = self._new_delta_fl_w, width = ent_w)
self._ent_delta_fl_thk = tk.Entry(self._frame, textvariable = self._new_delta_fl_thk, width = ent_w)
bg_col = 'pink'
self._ent_opt_spacing = tk.Entry(self._frame, textvariable=self._new_opt_spacing, width=ent_w,bg=bg_col)
self._ent_opt_pl_thk = tk.Entry(self._frame, textvariable=self._new_opt_pl_thk, width=ent_w,bg=bg_col)
self._ent_opt_web_h = tk.Entry(self._frame, textvariable=self._new_opt_web_h, width=ent_w,bg=bg_col)
self._ent_opt_web_thk = tk.Entry(self._frame, textvariable=self._new_opt_web_thk, width=ent_w,bg=bg_col)
self._ent_opt_fl_w = tk.Entry(self._frame, textvariable=self._new_opt_fl_w, width=ent_w,bg=bg_col)
self._ent_opt_fl_thk = tk.Entry(self._frame, textvariable=self._new_opt_fl_thk, width=ent_w,bg=bg_col)
# stresses in plate and stiffener
self._new_trans_stress_high = tk.DoubleVar()
self._new_trans_stress_low = tk.DoubleVar()
self._new_axial_stress = tk.DoubleVar()
self._new_shear_stress = tk.DoubleVar()
self._new_design_pressure = tk.DoubleVar()
self._new_pressure_side = tk.StringVar()
self._ent_trans_stress_high = tk.Entry(self._frame, textvariable=self._new_trans_stress_high, width=ent_w)
self._ent_trans_stress_low = tk.Entry(self._frame, textvariable=self._new_trans_stress_low, width=ent_w)
self._ent_axial_stress = tk.Entry(self._frame, textvariable=self._new_axial_stress, width=ent_w)
self._ent_design_pressure = tk.Entry(self._frame, textvariable=self._new_design_pressure, width=ent_w)
self._ent_design_pressure_side = tk.OptionMenu(self._frame,self._new_pressure_side,*('p','s'))
self._ent_shear_stress = tk.Entry(self._frame, textvariable=self._new_shear_stress, width=ent_w)
start_x,start_y,dx,dy = 20,100,100,40
self._new_processes = tk.IntVar()
self._new_processes.set(max(cpu_count() - 1, 1))
tk.Label(self._frame, text='Processes\n (CPUs)', font='Verdana 9 bold', bg = 'silver')\
.place(x=start_x + 8.3 * dx, y=start_y - 1.1 * dy)
tk.Entry(self._frame, textvariable=self._new_processes, width = 12, bg = 'silver')\
.place(x=start_x + 8.3 * dx, y=start_y - 0.3 * dy)
tk.Label(self._frame,text='Upper bounds [mm]',font='Verdana 9').place(x=start_x,y=start_y)
tk.Label(self._frame, text='Iteration delta [mm]',font='Verdana 9').place(x=start_x, y=start_y+dy)
tk.Label(self._frame, text='Lower bounds [mm]',font='Verdana 9').place(x=start_x, y=start_y+2*dy)
tk.Label(self._frame, text='Spacing [mm]', font='Verdana 7 bold').place(x=start_x + 1.97 * dx, y=start_y-0.6*dy)
tk.Label(self._frame, text='Plate thk. [mm]', font='Verdana 7 bold').place(x=start_x + 2.97 * dx, y=start_y-0.6*dy)
tk.Label(self._frame, text='Web height [mm]', font='Verdana 7 bold').place(x=start_x + 3.97 * dx, y=start_y-0.6*dy)
tk.Label(self._frame, text='Web thk. [mm]', font='Verdana 7 bold').place(x=start_x + 4.97 * dx, y=start_y-0.6*dy)
tk.Label(self._frame, text='Flange width [mm]', font='Verdana 7 bold').place(x=start_x + 5.97 * dx, y=start_y-0.6*dy)
tk.Label(self._frame, text='Flange thk. [mm]', font='Verdana 7 bold').place(x=start_x + 6.97 * dx, y=start_y-0.6*dy)
tk.Label(self._frame, text='--------- Number of combinations to run --------->\n'
'PULS buckling is time consuming, about 0.2 sec. per comb.\n'
'RP-C203 is much faster and can run many more combinations, 1M+.\n'
'ML-CL is about as fast as RP-C203.',
font='Verdana 9 bold').place(x=start_x+0.1*dx, y=start_y + 2.8 * dy, anchor = tk.NW)
self._runnig_time_label = tk.Label(self._frame, text='',font='Verdana 12 bold', fg = 'red')
self._runnig_time_label.place(x=start_x+4.3*dx, y=start_y + 2.8 * dy)
#tk.Label(self._frame, text='seconds ',font='Verdana 9 bold').place(x=start_x+6*dx, y=start_y + 2.8 * dy)
self._result_label = tk.Label(self._frame, text = '',font = 'Verdana 9 bold' )
self._result_label.place(x=start_x, y=start_y + 4.2 * dy)
self._ent_spacing_upper.place(x=start_x+dx*2,y=start_y)
self._ent_delta_spacing.place(x=start_x+dx*2,y=start_y+dy)
self._ent_spacing_lower.place(x=start_x+dx*2,y=start_y+2*dy)
self._ent_pl_thk_upper.place(x=start_x+dx*3,y=start_y)
self._ent_delta_pl_thk.place(x=start_x+dx*3,y=start_y+dy)
self._ent_pl_thk_lower.place(x=start_x+dx*3,y=start_y+2*dy)
self._ent_web_h_upper.place(x=start_x+dx*4,y=start_y)
self._ent_delta_web_h.place(x=start_x+dx*4,y=start_y+dy)
self._ent_web_h_lower.place(x=start_x+dx*4,y=start_y+2*dy)
self._ent_web_thk_upper.place(x=start_x+dx*5,y=start_y)
self._ent_delta_web_thk.place(x=start_x+dx*5,y=start_y+dy)
self._ent_web_thk_lower.place(x=start_x+dx*5,y=start_y+2*dy)
self._ent_fl_w_upper.place(x=start_x+dx*6,y=start_y)
self._ent_delta_fl_w.place(x=start_x+dx*6,y=start_y+dy)
self._ent_fl_w_lower.place(x=start_x+dx*6,y=start_y+2*dy)
self._ent_fl_thk_upper.place(x=start_x+dx*7,y=start_y)
self._ent_delta_fl_thk.place(x=start_x+dx*7,y=start_y+dy)
self._ent_fl_thk_lower.place(x=start_x+dx*7,y=start_y+2*dy)
###
# tk.Label(self._frame,text='Optimized result:\n')\
# .place(x=start_x,y=start_y+ver_mult*dy*0.9)
dx_mult = 0.7
tk.Label(self._frame,text='Optimized values').place(x=start_x,y=start_y+17*dy)
tk.Label(self._frame, text='s').place(x=start_x, y=start_y + 18 * dy)
tk.Label(self._frame, text='pl_thk').place(x=start_x, y=start_y + 19 * dy)
self._ent_opt_spacing.place(x=start_x+dx_mult*dx,y=start_y+18*dy)
self._ent_opt_pl_thk.place(x=start_x+dx_mult*dx,y=start_y+19*dy)
tk.Label(self._frame, text='web_h').place(x=start_x+2*dx_mult*dx, y=start_y + 18 * dy)
tk.Label(self._frame, text='web_thk').place(x=start_x+2*dx_mult*dx, y=start_y + 19 * dy)
self._ent_opt_web_h.place(x=start_x+3*dx_mult*dx,y=start_y+18*dy)
self._ent_opt_web_thk.place(x=start_x+3*dx_mult*dx,y=start_y+19*dy)
tk.Label(self._frame, text='fl_w').place(x=start_x+4*dx_mult*dx, y=start_y + 18 * dy)
tk.Label(self._frame, text='fl_thk').place(x=start_x+4*dx_mult*dx, y=start_y + 19 * dy)
self._ent_opt_fl_w.place(x=start_x+5*dx_mult*dx,y=start_y+18*dy)
self._ent_opt_fl_thk.place(x=start_x+5*dx_mult*dx,y=start_y+19*dy)
#Labels for the pso
self._lb_swarm_size = tk.Label(self._frame,text='swarm size')
self._lb_omega = tk.Label(self._frame,text='omega')
self._lb_phip = tk.Label(self._frame,text='phip')
self._lb_phig = tk.Label(self._frame,text='phig')
self._lb_maxiter = tk.Label(self._frame,text='maxiter')
self._lb_minstep = tk.Label(self._frame,text='minstep')
self._lb_minfunc = tk.Label(self._frame,text='minfunc')
###
tk.Label(self._frame, text='Sigma,y1_Sd - large transversal stress', font='Verdana 9')\
.place(x=start_x+dx*5,y=start_y+11.5*dy)
tk.Label(self._frame, text='MPa', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+11.5*dy)
tk.Label(self._frame, text='Sigma,y2_Sd - small transversal stress', font='Verdana 9')\
.place(x=start_x+dx*5,y=start_y+12.5*dy)
tk.Label(self._frame, text='MPa', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+12.5*dy)
tk.Label(self._frame, text='Sigma,x_Sd - axial stress', font='Verdana 9')\
.place(x=start_x+dx*5,y=start_y+13.5*dy)
tk.Label(self._frame, text='MPa', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+13.5*dy)
tk.Label(self._frame, text='Tau,xy - shear stress', font='Verdana 9')\
.place(x=start_x+dx*5,y=start_y+14.5*dy)
tk.Label(self._frame, text='MPa', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+14.5*dy)
tk.Label(self._frame, text='Applied pressure ', font='Verdana 9 bold')\
.place(x=start_x+dx*5,y=start_y+15.5*dy)
tk.Label(self._frame, text='kPa', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+15.5*dy)
tk.Label(self._frame, text='Plate or stiffener side (p/s): ', font='Verdana 9 bold')\
.place(x=start_x+dx*9.5,y=start_y+15.5*dy)
tk.Label(self._frame, text='Span: ', font='Verdana 9') \
.place(x=start_x + dx * 5, y=start_y + 16.5 * dy)
tk.Label(self._frame, text='m', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+16.5*dy)
tk.Label(self._frame, text='Girder length,Lg: ', font='Verdana 9') \
.place(x=start_x + dx * 5, y=start_y + 17.5 * dy)
tk.Label(self._frame, text='m', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+17.5*dy)
tk.Label(self._frame, text='Slamming pressure ', font='Verdana 9') \
.place(x=start_x + dx * 5, y=start_y + 18.5 * dy)
tk.Label(self._frame, text='Pa', font='Verdana 9')\
.place(x=start_x+dx*9,y=start_y+18.5*dy)
if self._fatigue_pressure is not None:
tk.Label(self._frame, text='Fatigue pressure: internal= '+str(self._fatigue_pressure['p_int'])+ ' external= '
+str(self._fatigue_pressure['p_ext']), font='Verdana 7') \
.place(x=start_x + dx * 5, y=start_y + 19.3 * dy)
else:
tk.Label(self._frame, text='Fatigue pressure: internal= '+str(0)+ ' external= '
+str(0), font='Verdana 7') \
.place(x=start_x + dx * 5, y=start_y + 19.3 * dy)
self._ent_trans_stress_high.place(x=start_x+dx*8,y=start_y+11.5*dy)
self._ent_trans_stress_low.place(x=start_x+dx*8,y=start_y+12.5*dy)
self._ent_axial_stress.place(x=start_x+dx*8,y=start_y+13.5*dy)
self._ent_shear_stress.place(x=start_x + dx * 8, y=start_y + 14.5 * dy)
self._ent_design_pressure.place(x=start_x + dx * 8, y=start_y + 15.5 * dy)
self._ent_design_pressure_side.place(x=start_x + dx * 12, y=start_y + 15.5 * dy)
self._ent_span.place(x=start_x + dx * 8, y=start_y + 16.5 * dy)
self._ent_width_lg.place(x=start_x + dx * 8, y=start_y + 17.5 * dy)
self._ent_slamming_pressure.place(x=start_x + dx * 8, y=start_y + 18.5 * dy)
#setting default values
init_dim = float(10) #mm
init_thk = float(1) #mm
self._new_delta_spacing.set(init_dim)
self._new_delta_pl_thk.set(init_thk)
self._new_delta_web_h.set(init_dim)
self._new_delta_web_thk.set(init_thk)
self._new_delta_fl_w.set(init_dim)
self._new_delta_fl_thk.set(init_thk)
self._new_trans_stress_high.set(self._initial_calc_obj.Plate.get_sigma_y1())
self._new_trans_stress_low.set(self._initial_calc_obj.Plate.get_sigma_y2())
self._new_axial_stress.set(self._initial_calc_obj.Plate.get_sigma_x1())
self._new_shear_stress.set(self._initial_calc_obj.Plate.get_tau_xy())
self._new_design_pressure.set(self._lateral_pressure)
self._new_slamming_pressure.set(self._slamming_pressure)
if self._fatigue_pressure is None:
self._new_fatigue_ext_press.set(0), self._new_fatigue_int_press.set(0)
else:
self._new_fatigue_int_press.set(self._fatigue_pressure['p_int']), \
self._new_fatigue_ext_press.set(self._fatigue_pressure['p_ext'])
self._new_spacing_upper.set(round(self._spacing*1000,5))
self._new_spacing_lower.set(round(max(self._spacing*1000,0),5))
self._new_pl_thk_upper.set(round(self._pl_thk*1000+10,5))
self._new_pl_thk_lower.set(round(max(self._pl_thk*1000-10,float(10)),5))
self._new_web_h_upper.set(round(self._stf_web_h*1000+100,5))
self._new_web_h_lower.set(round(max(self._stf_web_h*1000-100,100),5))
self._new_web_thk_upper.set(round(self._stf_web_thk*1000+10,5))
self._new_web_thk_lower.set(round(max(self._stf_web_thk*1000-10,float(10)),5))
if self._initial_calc_obj.Stiffener is not None:
if self._initial_calc_obj.Stiffener.get_stiffener_type() != 'FB':
self._new_fl_w_upper.set(min(round(self._fl_w*1000+100,5), 200))
self._new_fl_w_lower.set(round(max(self._fl_w*1000-100,100),5))
                self._new_fl_thk_upper.set(round(self._fl_thk*1000+10,5))
                self._new_fl_thk_lower.set(round(max(self._fl_thk*1000-10,10),5))
else:
self._new_fl_w_upper.set(0)
self._new_fl_w_lower.set(0)
self._new_fl_thk_upper.set(0)
self._new_fl_thk_lower.set(0)
self._new_pressure_side.set('p')
self._new_width_lg.set(10)
self._new_span.set(round(self._initial_calc_obj.Plate.get_span(),5))
self._new_algorithm.set('anysmart')
self._new_algorithm_random_trials.set(100000)
self._new_swarm_size.set(100)
self._new_omega.set(0.5)
self._new_phip.set(0.5)
self._new_phig.set(0.5)
self._new_maxiter.set(100)
self._new_minstep.set(1e-8)
self._new_minfunc.set(1e-8)
self._new_delta_spacing.trace('w',self.update_running_time)
self._new_delta_pl_thk.trace('w',self.update_running_time)
self._new_delta_web_h.trace('w',self.update_running_time)
self._new_delta_web_thk.trace('w',self.update_running_time)
self._new_delta_fl_w.trace('w',self.update_running_time)
self._new_delta_fl_thk.trace('w',self.update_running_time)
self._new_spacing_upper.trace('w',self.update_running_time)
self._new_spacing_lower.trace('w',self.update_running_time)
self._new_pl_thk_upper.trace('w',self.update_running_time)
self._new_pl_thk_lower.trace('w',self.update_running_time)
self._new_web_h_upper.trace('w',self.update_running_time)
self._new_web_h_lower.trace('w',self.update_running_time)
self._new_web_thk_upper.trace('w',self.update_running_time)
self._new_web_thk_lower.trace('w',self.update_running_time)
self._new_fl_w_upper.trace('w',self.update_running_time)
self._new_fl_w_lower.trace('w',self.update_running_time)
self._new_fl_thk_upper.trace('w',self.update_running_time)
self._new_fl_thk_lower.trace('w',self.update_running_time)
self._new_algorithm_random_trials.trace('w',self.update_running_time)
self._new_algorithm.trace('w',self.update_running_time)
self.running_time_per_item = {'PULS':0.2489626556016598, 'RP': 1.009943181818182e-5}
self.initial_weight = op.calc_weight([self._spacing,self._pl_thk,self._stf_web_h,self._stf_web_thk,
self._fl_w,self._fl_thk,self._new_span.get(),self._new_width_lg.get()])
img_file_name = 'img_plate_and_stiffener.gif'
if os.path.isfile('images/' + img_file_name):
file_path = 'images/' + img_file_name
else:
file_path = self._root_dir + '/images/' + img_file_name
photo = tk.PhotoImage(file=file_path)
label = tk.Label(self._frame,image=photo)
label.image = photo # keep a reference!
label.place(x=550, y=300)
tk.Label(self._frame,text='Select algorithm', font = 'Verdana 8 bold').place(x=start_x+dx*11, y=start_y+0.5*dy)
self._ent_algorithm.place(x=start_x+dx*11, y=start_y+dy)
self.algorithm_random_label = tk.Label(self._frame, text='Number of trials')
        tk.Button(self._frame,text='algorithm information',command=self.algorithm_info,bg='white')\
.place(x=start_x+dx*11, y=start_y+dy*2)
self.run_button = tk.Button(self._frame,text='RUN OPTIMIZATION!', command=self.run_optimizaion, bg='red',
font='Verdana 10 bold',fg='Yellow', relief="raised")
self.run_button.place(x=start_x+dx*8, y=start_y+dy*0.5, relwidth = 0.15)
self.run_results = tk.Button(self._frame,text='show calculated', command=self.plot_results, bg='white',
font='Verdana 10',fg='black')
self.run_results.place(x=start_x+dx*8, y=start_y+dy*1.5)
self._opt_actual_running_time.place(x=start_x+dx*9.5, y=start_y-dy)
self.close_and_save =tk.Button(self._frame,text='Return and replace initial structure with optimized',
command=self.save_and_close,bg='green',font='Verdana 10',fg='yellow')
self.close_and_save.place(x=start_x+dx*5,y=10)
tk.Button(self._frame, text='Open predefined stiffeners example',
command=self.open_example_file, bg='white', font='Verdana 10')\
.place(x=start_x+dx*10,y=10)
# Selection of constraints
self._new_check_sec_mod = tk.BooleanVar()
self._new_check_min_pl_thk = tk.BooleanVar()
self._new_check_shear_area = tk.BooleanVar()
self._new_check_buckling = tk.BooleanVar()
self._new_check_buckling_puls = tk.BooleanVar()
self._new_check_buckling_ml_cl = tk.BooleanVar()
self._new_check_fatigue = tk.BooleanVar()
self._new_check_slamming = tk.BooleanVar()
self._new_check_local_buckling = tk.BooleanVar()
self._new_use_weight_filter = tk.BooleanVar()
self._new_check_sec_mod.set(True)
self._new_check_min_pl_thk.set(True)
self._new_check_shear_area.set(True)
self._new_check_buckling.set(True)
self._new_check_fatigue.set(True)
self._new_check_slamming.set(False)
self._new_check_local_buckling.set(True)
self._new_use_weight_filter.set(True)
self._new_check_buckling_puls.set(False)
self._new_check_buckling_ml_cl.set(False)
self._new_check_buckling_puls.trace('w', self.update_running_time)
self._new_check_buckling_ml_cl.trace('w', self.update_running_time)
start_y = 140
        tk.Label(self._frame,text='Check for minimum section modulus').place(x=start_x+dx*9.7,y=start_y+4*dy)
tk.Label(self._frame, text='Check for minimum plate thk.').place(x=start_x+dx*9.7,y=start_y+5*dy)
tk.Label(self._frame, text='Check for minimum shear area').place(x=start_x+dx*9.7,y=start_y+6*dy)
tk.Label(self._frame, text='Check for buckling (RP-C201)').place(x=start_x+dx*9.7,y=start_y+7*dy)
tk.Label(self._frame, text='Check for fatigue (RP-C203)').place(x=start_x + dx * 9.7, y=start_y + 8 * dy)
tk.Label(self._frame, text='Check for bow slamming').place(x=start_x + dx * 9.7, y=start_y + 9 * dy)
tk.Label(self._frame, text='Check for local stf. buckling').place(x=start_x + dx * 9.7, y=start_y + 10 * dy)
tk.Label(self._frame, text='Use weight filter (for speed)').place(x=start_x + dx * 9.7, y=start_y + 11 * dy)
tk.Label(self._frame, text='Check for buckling (PULS)').place(x=start_x + dx * 9.7, y=start_y + 12 * dy)
tk.Label(self._frame, text='Check for buckling (ML-CL)').place(x=start_x + dx * 9.7, y=start_y + 13 * dy)
tk.Checkbutton(self._frame,variable=self._new_check_sec_mod).place(x=start_x+dx*12,y=start_y+4*dy)
tk.Checkbutton(self._frame, variable=self._new_check_min_pl_thk).place(x=start_x+dx*12,y=start_y+5*dy)
tk.Checkbutton(self._frame, variable=self._new_check_shear_area).place(x=start_x+dx*12,y=start_y+6*dy)
tk.Checkbutton(self._frame, variable=self._new_check_buckling).place(x=start_x+dx*12,y=start_y+7*dy)
tk.Checkbutton(self._frame, variable=self._new_check_fatigue).place(x=start_x + dx * 12, y=start_y + 8 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_slamming).place(x=start_x + dx * 12, y=start_y + 9 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_local_buckling).place(x=start_x + dx * 12,
y=start_y + 10 * dy)
tk.Checkbutton(self._frame, variable=self._new_use_weight_filter).place(x=start_x + dx * 12,
y=start_y + 11 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_buckling_puls).place(x=start_x + dx * 12,
y=start_y + 12 * dy)
tk.Checkbutton(self._frame, variable=self._new_check_buckling_ml_cl).place(x=start_x + dx * 12,
y=start_y + 13 * dy)
# Stress scaling
self._new_fup = tk.DoubleVar()
self._new_fup.set(0.5)
self._new_fdwn = tk.DoubleVar()
self._new_fdwn.set(1)
tk.Label(self._frame, text='Factor when scaling stresses up, fup')\
.place(x=start_x + dx * 9.7, y=start_y + 16 * dy)
ent_fup = tk.Entry(self._frame, textvariable=self._new_fup, width = 10)
ent_fup.place(x=start_x + dx * 12, y=start_y + 16 * dy)
        tk.Label(self._frame, text='Factor when scaling stresses down, fdown')\
.place(x=start_x + dx * 9.7, y=start_y + 17 * dy)
ent_fdwn = tk.Entry(self._frame, textvariable=self._new_fdwn, width = 10)
ent_fdwn.place(x=start_x + dx * 12, y=start_y + 17 * dy)
        # tk.Button(self._frame,text='Iterate predefined stiffeners',command=self.open_multiple_files ,bg='yellow')\
        #     .place(x=start_x, y=start_y - dy * 2)
        self._toggle_btn = tk.Button(self._frame, text="Iterate predefined stiffeners", relief="raised",
command=self.toggle, bg = 'salmon')
self._toggle_btn.place(x=start_x, y=start_y - dy * 2)
self._toggle_object, self._filez = self._initial_calc_obj, None
self.draw_properties()
self.update_running_time()
def selected_algorithm(self,event):
'''
Action when selecting an algorithm.
:return:
'''
start_x, start_y, dx, dy = 20, 100, 100, 40
if self._new_algorithm.get()=='random' or self._new_algorithm.get()=='random_no_delta':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
self._ent_random_trials.place(x=start_x+dx*11.3, y=start_y+1.2*dy)
self.algorithm_random_label.place(x=start_x+dx*11.3, y=start_y+0.5*dy)
elif self._new_algorithm.get()=='anysmart' or self._new_algorithm.get()=='anydetail':
self._ent_random_trials.place_forget()
self.algorithm_random_label.place_forget()
self._lb_swarm_size.place_forget()
self._lb_omega.place_forget()
self._lb_phip.place_forget()
self._lb_phig.place_forget()
self._lb_maxiter.place_forget()
self._lb_minstep.place_forget()
self._lb_minfunc.place_forget()
self._ent_swarm_size.place_forget()
self._ent_omega.place_forget()
self._ent_phip.place_forget()
self._ent_phig.place_forget()
self._ent_maxiter.place_forget()
self._ent_minstep.place_forget()
self._ent_minfunc.place_forget()
elif self._new_algorithm.get()=='pso':
y_place_label =11.2
y_place = 12.2
self._ent_random_trials.place_forget()
self._lb_swarm_size.place(x=start_x+dx*y_place_label, y=start_y-2*dy)
self._lb_omega.place(x=start_x+dx*y_place_label, y=start_y-1*dy)
self._lb_phip.place(x=start_x+dx*y_place_label, y=start_y-0*dy)
self._lb_phig.place(x=start_x+dx*y_place_label, y=start_y+1*dy)
self._lb_maxiter.place(x=start_x+dx*y_place_label, y=start_y+2*dy)
self._lb_minstep.place(x=start_x+dx*y_place_label, y=start_y+3*dy)
self._lb_minfunc.place(x=start_x+dx*y_place_label, y=start_y+4*dy)
self._ent_swarm_size.place(x=start_x+dx*y_place, y=start_y-2*dy)
self._ent_omega.place(x=start_x+dx*y_place, y=start_y-1*dy)
self._ent_phip.place(x=start_x+dx*y_place, y=start_y+0*dy)
self._ent_phig.place(x=start_x+dx*y_place, y=start_y+1*dy)
self._ent_maxiter.place(x=start_x+dx*y_place, y=start_y+2*dy)
self._ent_minstep.place(x=start_x+dx*y_place, y=start_y+3*dy)
self._ent_minfunc.place(x=start_x+dx*y_place, y=start_y+4*dy)
def modify_structure_object(self):
        ''' Changing parameters in the structure object before running. '''
pass
def run_optimizaion(self):
'''
function for button
:return:
'''
self.run_button.config(bg = 'white')
self.run_button.config(fg='red')
self.run_button.config(text='RUNNING OPTIMIZATION')
self.run_button.config(relief="sunken")
self._opt_actual_running_time.config(text='Run started ' + datetime.datetime.now().strftime("%H:%M:%S"))
self._opt_actual_running_time.update()
t_start = time.time()
self._opt_results, self._opt_runned = (), False
if self._PULS_object is not None:
puls_sheet_location = self._PULS_object.puls_sheet_location
puls_acceptance = self._puls_acceptance
if self._new_check_buckling_puls.get() == True:
if puls_sheet_location is None or not os.path.isfile(
puls_sheet_location):
tk.messagebox.showerror('No PULS excel sheet located', 'Set location of PULS excel sheet.\n'
'Note that PULS excel may require 32 bit '
'office.\n\n'
'A sheet may be provided but does not exist'
' in :\n'
                                        + str(puls_sheet_location) +
                                        '\n\n Return to main window and run one or more lines in PULS.')
else:
puls_sheet_location = None
puls_acceptance =0.87
self.pso_parameters = (self._new_swarm_size.get(),self._new_omega.get(),self._new_phip.get(),
self._new_phig.get(),
self._new_maxiter.get(),self._new_minstep.get(),self._new_minfunc.get())
        constraints = (self._new_check_sec_mod.get(),self._new_check_min_pl_thk.get(),
self._new_check_shear_area.get(), self._new_check_buckling.get(),
self._new_check_fatigue.get(), self._new_check_slamming.get(),
self._new_check_local_buckling.get(), self._new_check_buckling_puls.get(),
self._new_check_buckling_ml_cl.get(), False)
self._initial_calc_obj.Plate.set_span(self._new_span.get())
if self._fatigue_pressure is not None:
fat_press = ((self._fatigue_pressure['p_ext']['loaded'],self._fatigue_pressure['p_ext']['ballast'],
self._fatigue_pressure['p_ext']['part']),
(self._fatigue_pressure['p_int']['loaded'],self._fatigue_pressure['p_int']['ballast'],
self._fatigue_pressure['p_int']['part']))
else:
fat_press = None
self._opt_results= op.run_optmizataion(self._initial_calc_obj,self.get_lower_bounds(),
self.get_upper_bounds(),self._new_design_pressure.get(),
self.get_deltas(),algorithm=self._new_algorithm.get(),
trials=self._new_algorithm_random_trials.get(),
side=self._new_pressure_side.get(),
                                               const_chk=constraints,pso_options = self.pso_parameters,
fatigue_obj=self._fatigue_object,
fat_press_ext_int=fat_press,
slamming_press = self._new_slamming_pressure.get(),
predefined_stiffener_iter=self._predefined_stiffener_iter,
processes=self._new_processes.get(),
use_weight_filter = False if self._new_check_buckling_puls.get()
else self._new_use_weight_filter.get(),
puls_sheet = puls_sheet_location, puls_acceptance = puls_acceptance,
                                               fdwn = self._new_fdwn.get(), fup = self._new_fup.get(),
ml_algo= self._ML_buckling)
if self._opt_results is not None and self._opt_results[0] is not None:
self._opt_actual_running_time.config(text='Actual running time: \n'
+str(round((time.time()-t_start)/60,4))+' min')
self._opt_actual_running_time.update()
self._opt_runned = True
if self._opt_results[0].Stiffener is not None:
text = 'Optimization result | Spacing: ' + str(round(self._opt_results[0].Plate.get_s(), 10) * 1000) +\
' Plate thickness: ' + str(round(self._opt_results[0].Plate.get_pl_thk() * 1000, 10)) +\
' Stiffener - T' + str(round(self._opt_results[0].Stiffener.get_web_h() * 1000, 10)) + 'x'\
+str(round(self._opt_results[0].Stiffener.get_web_thk() * 1000, 10)) +\
'+' + str(round(self._opt_results[0].Stiffener.get_fl_w() * 1000, 10)) + 'x'\
+str(round(self._opt_results[0].Stiffener.get_fl_thk() * 1000, 10))
else:
text = 'Optimization result | Spacing: ' + str(round(self._opt_results[0].Plate.get_s(), 10) * 1000) +\
' Plate thickness: ' + str(round(self._opt_results[0].Plate.get_pl_thk() * 1000, 10))
self._result_label.config(text= text)
self._new_opt_spacing.set(round(self._opt_results[0].Plate.get_s(),5))
self._new_opt_pl_thk.set(round(self._opt_results[0].Plate.get_pl_thk(),5))
if self._opt_results[0].Stiffener is not None:
self._new_opt_web_h.set(round(self._opt_results[0].Stiffener.get_web_h(),5))
self._new_opt_web_thk.set(round(self._opt_results[0].Stiffener.get_web_thk(),5))
self._new_opt_fl_w.set(round(self._opt_results[0].Stiffener.get_fl_w(),5))
self._new_opt_fl_thk.set(round(self._opt_results[0].Stiffener.get_fl_thk(),5))
self.draw_properties()
else:
messagebox.showinfo(title='Nothing found', message='No better alternatives found. Modify input.\n'
'There may be no alternative that is acceptable.\n')
self.run_button.config(bg='green')
self.run_button.config(fg='yellow')
self.run_button.config(text='RUN OPTIMIZATION')
self.run_button.config(relief="raised")
def get_running_time(self):
'''
Estimate the running time of the algorithm.
:return:
'''
if self._new_algorithm.get() in ['anysmart','anydetail']:
all_combs = op.any_get_all_combs(self.get_lower_bounds(), self.get_upper_bounds(), self.get_deltas(),
predef_stiffeners=None if self._predefined_stiffener_iter is None else
[item.get_tuple() for item in self._predefined_stiffener_iter])
number_of_combinations = len([val for val in all_combs])
return int(number_of_combinations * self.running_time_per_item['PULS' if self._new_check_buckling_puls.get()
else 'RP']), number_of_combinations
elif self._new_algorithm.get() in ['pso','random','random_no_delta']:
try:
number_of_combinations = \
max((self._new_spacing_upper.get()-self._new_spacing_lower.get())/self._new_delta_spacing.get(),1)* \
max((self._new_pl_thk_upper.get()-self._new_pl_thk_lower.get())/self._new_delta_pl_thk.get(),1)*\
max((self._new_web_h_upper.get()-self._new_web_h_lower.get())/self._new_delta_web_h.get(),1)*\
max((self._new_web_thk_upper.get()-self._new_web_thk_lower.get())/self._new_delta_web_thk.get(),1)*\
max((self._new_fl_w_upper.get()-self._new_fl_w_lower.get())/self._new_delta_fl_w.get(),1)*\
max((self._new_fl_thk_upper.get()-self._new_fl_thk_lower.get())/self._new_delta_fl_thk.get(),1)
return int(number_of_combinations*self.running_time_per_item['PULS' if self._new_check_buckling_puls.get()
else 'RP']),number_of_combinations
except TclError:
return 0,0
else:
try:
return int(self._new_algorithm_random_trials.get() * self.running_time_per_item['PULS' if self._new_check_buckling_puls.get()
else 'RP']),\
self._new_algorithm_random_trials.get()
except TclError:
return 0,0
def get_deltas(self):
'''
Return a numpy array of the deltas.
:return:
'''
return np.array([float(self._new_delta_spacing.get())/1000,float(self._new_delta_pl_thk.get())/1000,
float(self._new_delta_web_h.get())/1000,float(self._new_delta_web_thk.get())/1000,
float(self._new_delta_fl_w.get())/1000,float(self._new_delta_fl_thk.get())/1000])
def update_running_time(self,*args):
'''
Estimate the running time of the algorithm.
:return:
'''
try:
self._runnig_time_label.config(text=str(int(self.get_running_time()[1])) + ' (about '+
str(max(round(self.get_running_time()[1]*self.running_time_per_item['PULS'
if self._new_check_buckling_puls.get()
else 'RP']/60,2), 0.1))+ ' min.)')
except (ZeroDivisionError, TclError):
            pass  # ignore transient TclError raised while entry fields are being edited
if [self._new_check_buckling_ml_cl.get(),self._new_check_buckling_puls.get(),
self._new_check_buckling.get()].count(True) > 1:
            tk.messagebox.showerror('Selection error', 'You can only select one buckling type. Reselect.')
if self._new_check_buckling_puls.get():
self._new_check_buckling_puls.set(False)
if self._new_check_buckling_ml_cl.get():
self._new_check_buckling_ml_cl.set(False)
if self._new_check_buckling.get():
self._new_check_buckling.set(False)
self._new_check_local_buckling.set(False)
if self._new_check_buckling_puls.get():
if self._PULS_object is None or self._PULS_object.puls_sheet_location is None:
tk.messagebox.showerror('Missing PULS sheet', 'Go back to main window and set a PULS sheet location\n'
'by running one or more lines.')
self._new_check_buckling_puls.set(False)
def get_upper_bounds(self):
'''
Return an numpy array of upper bounds.
:return:
'''
return np.array([self._new_spacing_upper.get()/1000,self._new_pl_thk_upper.get()/1000,
self._new_web_h_upper.get()/1000,self._new_web_thk_upper.get()/1000,
self._new_fl_w_upper.get()/1000,self._new_fl_thk_upper.get()/1000,
self._new_span.get(),self._new_width_lg.get()])
def get_lower_bounds(self):
'''
Return an numpy array of lower bounds.
:return:
'''
return np.array([self._new_spacing_lower.get()/1000,self._new_pl_thk_lower.get()/1000,
self._new_web_h_lower.get()/1000,self._new_web_thk_lower.get()/1000,
self._new_fl_w_lower.get()/1000,self._new_fl_thk_lower.get()/1000,
self._new_span.get(), self._new_width_lg.get()])
def checkered(self,line_distance):
# vertical lines at an interval of "line_distance" pixel
for x in range(line_distance, self._canvas_dim[0], line_distance):
            self._canvas_opt.create_line(x, 0, x, self._canvas_dim[1], fill="grey",stipple='gray50')
# horizontal lines at an interval of "line_distance" pixel
for y in range(line_distance, self._canvas_dim[1], line_distance):
self._canvas_opt.create_line(0, y, self._canvas_dim[0], y, fill="grey",stipple='gray50')
def draw_properties(self):
'''
Drawing properties in the canvas.
:return:
'''
self._canvas_opt.delete('all')
self.checkered(10)
ctr_x = self._canvas_dim[0]/2
ctr_y = self._canvas_dim[1]/2+200
m = self._draw_scale
init_color,init_stipple = 'blue','gray12'
opt_color,opt_stippe = 'red','gray12'
self._canvas_opt.create_rectangle(0,0,self._canvas_dim[0]+10,80,fill='white')
self._canvas_opt.create_line(10,10,30,10,fill = init_color,width=5)
self._canvas_opt.create_text(270,10,text='Initial - Pl.: '+str(self._spacing*1000) +'x'+str(self._pl_thk*1000)+
' Stf.: '+str(self._stf_web_h*1000)+'x'+str(self._stf_web_thk*1000)+'+'+
str(self._fl_w*1000)+'x'+str(self._fl_thk*1000), font = 'Verdana 8',
fill = init_color)
self._canvas_opt.create_text(120,30,text='Weight (per Lg width): '+str(int(self.initial_weight)),
font = 'Verdana 8',fill = init_color)
self._canvas_opt.create_rectangle(ctr_x-m*self._spacing/2, ctr_y,ctr_x+m*self._spacing/2,
ctr_y-m*self._pl_thk, fill=init_color, stipple=init_stipple )
self._canvas_opt.create_rectangle(ctr_x - m * self._stf_web_thk / 2, ctr_y-m* self._pl_thk,
ctr_x + m * self._stf_web_thk / 2, ctr_y - m *(self._stf_web_h+self._pl_thk)
, fill=init_color, stipple=init_stipple )
if self._initial_calc_obj.Stiffener is None:
return
if self._initial_calc_obj.Stiffener.get_stiffener_type() not in ['L', 'L-bulb']:
self._canvas_opt.create_rectangle(ctr_x-m*self._fl_w/2, ctr_y-m*(self._pl_thk+self._stf_web_h),
ctr_x+m*self._fl_w/2, ctr_y-m*(self._pl_thk+self._stf_web_h+self._fl_thk),
fill=init_color, stipple=init_stipple)
else:
self._canvas_opt.create_rectangle(ctr_x-m*self._stf_web_thk/2, ctr_y-m*(self._pl_thk+self._stf_web_h),
ctr_x+m*self._fl_w, ctr_y-m*(self._pl_thk+self._stf_web_h+self._fl_thk),
fill=init_color, stipple=init_stipple)
if self._opt_runned:
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].Stiffener.get_s() / 2, ctr_y,
ctr_x + m * self._opt_results[0].Stiffener.get_s() / 2,
ctr_y - m * self._opt_results[0].Stiffener.get_pl_thk(), fill=opt_color,
stipple=opt_stippe)
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].Stiffener.get_web_thk() / 2, ctr_y -
m * self._opt_results[0].Stiffener.get_pl_thk(),
ctr_x + m * self._opt_results[0].Stiffener.get_web_thk() / 2,
ctr_y - m * (self._opt_results[0].Stiffener.get_web_h() + self._opt_results[0].Stiffener.get_pl_thk())
, fill=opt_color, stipple=opt_stippe)
if self._opt_results[0].Stiffener.get_stiffener_type() not in ['L', 'L-bulb']:
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].Stiffener.get_fl_w() / 2, ctr_y
- m * (self._opt_results[0].Stiffener.get_pl_thk()+ self._opt_results[0].Stiffener.get_web_h()),
ctr_x + m * self._opt_results[0].Stiffener.get_fl_w() / 2,ctr_y -
m * (self._opt_results[0].Stiffener.get_pl_thk() + self._opt_results[0].Stiffener.get_web_h() +
self._opt_results[0].Stiffener.get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
else:
self._canvas_opt.create_rectangle(ctr_x - m * self._opt_results[0].Stiffener.get_web_thk() / 2, ctr_y
- m * (self._opt_results[0].Stiffener.get_pl_thk()+ self._opt_results[0].Stiffener.get_web_h()),
ctr_x + m * self._opt_results[0].Stiffener.get_fl_w() ,ctr_y -
m * (self._opt_results[0].Stiffener.get_pl_thk() + self._opt_results[0].Stiffener.get_web_h() +
self._opt_results[0].Stiffener.get_fl_thk()),
fill=opt_color, stipple=opt_stippe)
self._canvas_opt.create_line(10, 50, 30, 50, fill=opt_color, width=5)
self._canvas_opt.create_text(270,50,text='Optimized - Pl.: '+str(round(self._opt_results[0].Stiffener.get_s()*1000,1))
+'x'+ str(round(self._opt_results[0].Stiffener.get_pl_thk()*1000,1))+
' Stf.: '+str(round(self._opt_results[0].Stiffener.get_web_h()*1000,1))+
'x'+str(round(self._opt_results[0].Stiffener.get_web_thk()*1000,1))+'+'+
str(round(self._opt_results[0].Stiffener.get_fl_w()*1000,1))+
'x'+str(round(self._opt_results[0].Stiffener.get_fl_thk()*1000,1)),
font = 'Verdana 8',fill = opt_color)
self._canvas_opt.create_text(120, 70, text='Weight (per Lg width): '
+ str(int(op.calc_weight([self._opt_results[0].Stiffener.get_s(),
self._opt_results[0].Stiffener.get_pl_thk(),
self._opt_results[0].Stiffener.get_web_h(),
self._opt_results[0].Stiffener.get_web_thk(),
self._opt_results[0].Stiffener.get_fl_w(),
self._opt_results[0].Stiffener.get_fl_thk(),
self._new_span.get(),
self._new_width_lg.get()]))),
font='Verdana 8', fill=opt_color)
def save_and_close(self):
'''
Save and close
:return:
'''
if __name__ == '__main__':
self._frame.destroy()
return
try:
self.app.on_close_opt_window(self._opt_results)
except (IndexError, TypeError):
messagebox.showinfo(title='Nothing to return',message='No results to return.')
return
self._frame.destroy()
def algorithm_info(self):
''' When button is clicked, info is displayed.'''
        messagebox.showinfo(title='Algorithm information',
                            message='The algorithms currently included are:\n'
'ANYSMART: \n'
' Calculates all alternatives using upper and lower bounds.\n'
' The step used inside the bounds is defined in deltas.\n'
                                    ' This algorithm uses MULTIPROCESSING and will be faster.\n\n'
'RANDOM: \n'
' Uses the same bounds and deltas as in ANYSMART.\n'
' Number of combinations calculated is defined in "trials",\n'
                                    ' which selects within the bounds and deltas defined.\n\n'
                                    'RANDOM_NO_DELTA:\n'
                                    ' Same as RANDOM, but does not use the defined deltas.\n'
                                    ' The deltas are set to 1 mm for all dimensions/thicknesses.\n\n'
'ANYDETAIL:\n'
' Same as for ANYSMART, but will take some more time and\n'
' provide a chart of weight development during execution.\n\n'
'PSO - Particle Swarm Search:\n'
' The information can be found on \n'
' http://pythonhosted.org/pyswarm/ \n'
' For further information google it!\n'
' Parameters:\n'
' swarmsize : The number of particles in the swarm (Default: 100)\n'
' omega : Particle velocity scaling factor (Default: 0.5)\n'
' phip : Scaling factor to search away from the particle’s \n'
' best known position (Default: 0.5)\n'
' phig : Scaling factor to search away from the swarm’s best \n'
' known position (Default: 0.5)\n'
' maxiter : The maximum number of iterations for the swarm \n'
' to search (Default: 100)\n'
' minstep : The minimum stepsize of swarm’s best position \n'
' before the search terminates (Default: 1e-8)\n'
' minfunc : The minimum change of swarm’s best objective value\n'
' before the search terminates (Default: 1e-8)\n\n'
'\n'
                                    'All algorithms calculate local scantling and buckling requirements')
def toggle(self):
if self._toggle_btn.config('relief')[-1] == 'sunken':
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg = 'salmon')
self._ent_spacing_upper.config(bg = 'white')
self._ent_spacing_lower.config(bg = 'white')
self._ent_delta_spacing.config(bg = 'white')
predefined_stiffener_iter = []
else:
self._toggle_btn.config(relief="sunken")
self._toggle_btn.config(bg='lightgreen')
self._ent_spacing_upper.config(bg = 'lightgreen')
self._ent_spacing_lower.config(bg = 'lightgreen')
self._ent_delta_spacing.config(bg = 'lightgreen')
self._ent_pl_thk_upper.config(bg = 'lightgreen')
self._ent_pl_thk_lower.config(bg = 'lightgreen')
self._ent_delta_pl_thk.config(bg = 'lightgreen')
open_files = askopenfilenames(parent=self._frame, title='Choose files to open',
initialdir=self._root_dir)
# TODO for both stiffeners and girders
self._initial_calc_obj.Stiffener.t = self._initial_calc_obj.Plate.t
self._initial_calc_obj.Stiffener.s = self._initial_calc_obj.Plate.s
predefined_stiffener_iter = hlp.helper_read_section_file(files=list(open_files),
obj=self._initial_calc_obj.Stiffener)
if predefined_stiffener_iter == []:
self._toggle_btn.config(relief="raised")
self._toggle_btn.config(bg = 'salmon')
self._ent_spacing_upper.config(bg = 'white')
self._ent_spacing_lower.config(bg = 'white')
self._ent_delta_spacing.config(bg = 'white')
self._ent_pl_thk_upper.config(bg = 'white')
self._ent_pl_thk_lower.config(bg = 'white')
self._ent_delta_pl_thk.config(bg = 'white')
self._predefined_stiffener_iter = None
else:
self._predefined_stiffener_iter = predefined_stiffener_iter
self.update_running_time()
def open_example_file(self):
import os
if os.path.isfile('sections.csv'):
os.startfile('sections.csv')
else:
os.startfile(self._root_dir + '/' + 'sections.csv')
def show_calculated(self):
''' '''
pass
def plot_results(self):
if len(self._opt_results) != 0:
op.plot_optimization_results(self._opt_results)
def write_result_csv(self):
if len(self._opt_results) != 0:
print(self._opt_results)
def receive_progress_info():
'''
Get progress info from optimization algorithm.
:return:
'''
print('hi')
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateOptimizeWindow(master=root)
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/optimize_window.py | optimize_window.py |
import tkinter as tk
from matplotlib import pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
try:
    import any_files.test as test
except ModuleNotFoundError:
    import ANYstructure.any_files.test as test
import numpy as np
from collections import deque
class CreateCompartmentWindow():
def __init__(self, master,app=None):
super(CreateCompartmentWindow, self).__init__()
if __name__ == '__main__':
base_canvas_dim = [1000, 720]
self.canvas_origo = [50, base_canvas_dim[1] - 50]
self.grid = test.get_grid(origo=self.canvas_origo,base_canvas_dim=base_canvas_dim)
self.parent_dimensions = base_canvas_dim
self.to_draw = test.get_to_draw()
else:
self.app = app
self.grid = app._main_grid
self.parent_dimensions = app._canvas_dim
self.to_draw = app._pending_grid_draw
self.parent_origo = app._canvas_origo
frame_dim = (1500,980)
self.canvas_origo = (50,720-50)
self.canvas_dim = (1000,720)
self.frame = master
self.frame.wm_title("Load properties")
self.frame.geometry(str(frame_dim[0])+'x'+str(frame_dim[1]))
self.frame.grab_set()
self.points_child = {}
self.child_dimensions = (self.parent_dimensions[0]-self.parent_dimensions[0]+1, self.parent_dimensions[1]+1)
for line,point in self.to_draw.items():
point1 = (int(point[0][0]),int(point[0][1]))
point2 = (int(point[1][0]),int(point[1][1]))
self.points_child[line] = [point1,point2]
for line, points in self.points_child.items():
for point in self.grid.get_points_along_line(points[0],points[1]):
self.grid.set_barrier(point[0],point[1])
fig = plt.figure()
self.draw_grid()
self.canvas_plt = FigureCanvasTkAgg(fig,self.frame)
self.canvas_plt.show()
self.canvas_plt.get_tk_widget().place(relx=0.5,rely=0.5)
#self.draw_grid()
tk.Button(self.frame,text='DRAW',command=self.draw_grid).place(relx=0.1,rely=0.1)
def __str__(self):
return 'class CreateCompartmentWindow(): Compartment string not implemented'
def draw_grid(self):
'''
Drawing grid
EMPTY = yellow
FULL = red
:return:
'''
def discrete_matshow(data):
# get discrete colormap
cmap = plt.get_cmap('RdBu', np.max(data) - np.min(data) + 1)
# set limits .5 outside true range
mat = plt.matshow(data, cmap=cmap, vmin=np.min(data) - .5, vmax=np.max(data) + .5)
# tell the colorbar to tick at integers
cax = plt.colorbar(mat, ticks=np.arange(np.min(data), np.max(data) + 1))
# # generate data
# a = np.random.randint(1, 20, size=(10, 10))
discrete_matshow(self.grid.get_matrix())
plt.suptitle('Tanks defined by numbers from 2 and up.')
return plt
#plt.show()
def search_dfs(self):
'''
Depth first search method.
:return:
'''
start = (0,0)
stack = make_stack.Stack()
stack.push_item(start)
while len(stack) != 0:
cell = stack.pop_item()
if self.grid.is_empty(cell[0], cell[1]):
self.grid.set_full(cell[0], cell[1])
for item in self.grid.four_neighbors(cell[0], cell[1]):
stack.push_item(item)
def search_bfs(self):
'''
        Breadth first search method.
        Searching every 20th pixel for empty places in the grid. When an empty cell is found, the search starts.
        The search ends when no more empty cells are found in the boundary regions (circular expansion of search).
        USE GRID CONVENTION HERE. NOT POINTS.
        grid(row,col) is the same as grid(y,x)
        points use
        point(x , y), which is the same as grid(col,row)
:return:
'''
compartment_count = 1
cells = 0
el_max = ''
el_min = ''
compartments = {}
for startrow in range(0, self.child_dimensions[1], 20):
for startcol in range(0, self.child_dimensions[0], 20):
if self.grid.is_empty(startrow,startcol):
el_max = ''
el_min = ''
cells = 0
boundary = deque()
boundary.append((startrow,startcol))
corners = []
while len(boundary) != 0:
current_cell = boundary.pop()
#find the min/max elevation, counting cells in tank
if el_max == '':
el_max = current_cell[0]
el_min = current_cell[0]
else:
if current_cell[0] < el_max:
el_max = current_cell[0]
if current_cell[0] > el_min:
el_min = current_cell[0]
cells += 1
neighbors = self.grid.eight_neighbors(current_cell[0], current_cell[1])
                        #doing search operations and looking for corners
no_of_barriers = 0
for neighbor in neighbors[0:4]:
if self.grid.get_value(neighbor[0], neighbor[1]) == -1: no_of_barriers += 1
else: pass
if self.grid.is_empty(neighbor[0], neighbor[1]):
self.grid.set_value(neighbor[0], neighbor[1],compartment_count)
boundary.append(neighbor)
#finding corners on diagonal cells
for neighbor in neighbors[4:]:
if self.grid.get_value(neighbor[0], neighbor[1]) == -1: no_of_barriers += 1
else: pass
if no_of_barriers > 4:
corners.append((neighbor[0], neighbor[1]))
# returning values to the program
compartments[compartment_count] = cells, corners
compartment_count += 1
return compartments
if __name__ == '__main__':
root = tk.Tk()
my_app = CreateCompartmentWindow(master=root)
root.mainloop() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/compartment_window.py | compartment_window.py |
sn_dict = {'B1': {'m1': 4.0, 'log a1': 15.117, 'm2': 5.0, 'log a2': 17.15, 'slope': 10000000.0, 'k': 0.0},
'B2': {'m1': 4.0, 'log a1': 14.885, 'm2': 5.0, 'log a2': 16.86, 'slope': 10000000.0, 'k': 0.0},
'C': {'m1': 3.0, 'log a1': 12.592, 'm2': 5.0, 'log a2': 16.32, 'slope': 10000000.0, 'k': 0.15},
'C1': {'m1': 3.0, 'log a1': 12.449, 'm2': 5.0, 'log a2': 16.08, 'slope': 10000000.0, 'k': 0.15},
'C2': {'m1': 3.0, 'log a1': 12.301, 'm2': 5.0, 'log a2': 15.84, 'slope': 10000000.0, 'k': 0.15},
'D': {'m1': 3.0, 'log a1': 12.164, 'm2': 5.0, 'log a2': 15.61, 'slope': 10000000.0, 'k': 0.2},
'E': {'m1': 3.0, 'log a1': 12.01, 'm2': 5.0, 'log a2': 15.35, 'slope': 10000000.0, 'k': 0.2},
'F': {'m1': 3.0, 'log a1': 11.855, 'm2': 5.0, 'log a2': 15.09, 'slope': 10000000.0, 'k': 0.25},
'F1': {'m1': 3.0, 'log a1': 11.699, 'm2': 5.0, 'log a2': 14.83, 'slope': 10000000.0, 'k': 0.25},
'F3': {'m1': 3.0, 'log a1': 11.546, 'm2': 5.0, 'log a2': 14.58, 'slope': 10000000.0, 'k': 0.25},
'G': {'m1': 3.0, 'log a1': 11.398, 'm2': 5.0, 'log a2': 14.33, 'slope': 10000000.0, 'k': 0.25},
'W1': {'m1': 3.0, 'log a1': 11.261, 'm2': 5.0, 'log a2': 14.1, 'slope': 10000000.0, 'k': 0.25},
'W2': {'m1': 3.0, 'log a1': 11.107, 'm2': 5.0, 'log a2': 13.85, 'slope': 10000000.0, 'k': 0.25},
'W3': {'m1': 3.0, 'log a1': 10.97, 'm2': 5.0, 'log a2': 13.62, 'slope': 10000000.0, 'k': 0.25},
'B1c': {'m1': 4.0, 'log a1': 14.917, 'm2': 5.0, 'log a2': 17.146, 'slope': 1000000.0, 'k': 0.0},
'B2c': {'m1': 4.0, 'log a1': 14.685, 'm2': 5.0, 'log a2': 16.856, 'slope': 1000000.0, 'k': 0.0},
'Cc': {'m1': 3.0, 'log a1': 12.192, 'm2': 5.0, 'log a2': 16.32, 'slope': 1000000.0, 'k': 0.15},
'C1c': {'m1': 3.0, 'log a1': 12.049, 'm2': 5.0, 'log a2': 16.081, 'slope': 1000000.0, 'k': 0.15},
'C2c': {'m1': 3.0, 'log a1': 11.901, 'm2': 5.0, 'log a2': 15.835, 'slope': 1000000.0, 'k': 0.15},
'Dc': {'m1': 3.0, 'log a1': 11.764, 'm2': 5.0, 'log a2': 15.606, 'slope': 1000000.0, 'k': 0.2},
'Ec': {'m1': 3.0, 'log a1': 11.61, 'm2': 5.0, 'log a2': 15.35, 'slope': 1000000.0, 'k': 0.2},
'Fc': {'m1': 3.0, 'log a1': 11.455, 'm2': 5.0, 'log a2': 15.091, 'slope': 1000000.0, 'k': 0.25},
'F1c': {'m1': 3.0, 'log a1': 11.299, 'm2': 5.0, 'log a2': 14.832, 'slope': 1000000.0, 'k': 0.25},
'F3c': {'m1': 3.0, 'log a1': 11.146, 'm2': 5.0, 'log a2': 14.576, 'slope': 1000000.0, 'k': 0.25},
'Gc': {'m1': 3.0, 'log a1': 10.998, 'm2': 5.0, 'log a2': 14.33, 'slope': 1000000.0, 'k': 0.25},
'W1c': {'m1': 3.0, 'log a1': 10.861, 'm2': 5.0, 'log a2': 14.101, 'slope': 1000000.0, 'k': 0.25},
'W2c': {'m1': 3.0, 'log a1': 10.707, 'm2': 5.0, 'log a2': 13.845, 'slope': 1000000.0, 'k': 0.25},
'W3c': {'m1': 3.0, 'log a1': 10.57, 'm2': 5.0, 'log a2': 13.617, 'slope': 1000000.0, 'k': 0.25}}
def get_paramter(curve,parameter):
return sn_dict[curve][parameter]
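# Illustrative lookup sketch (comments only, not part of the original module):
# each curve entry stores the two-slope S-N parameters m1/'log a1', m2/'log a2',
# the slope transition in cycles ('slope') and the thickness exponent 'k', so e.g.
# get_paramter('D', 'm1') returns 3.0 and get_paramter('D', 'k') returns 0.2.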
def get_all_curves():
return sn_dict.keys() | ANYstructure | /ANYstructure-4.10.tar.gz/ANYstructure-4.10/any_files/SN_curve_parameters.py | SN_curve_parameters.py |
AOPython change log
1.0.3
- Removed re/sre weave test from aopythonexamples (it didn't pass on Python 2.4 and it wasn't a good test).
- Improved Aspect.wrap() function
- Updated copyright dates
- Added setup.py
1.0.2
- API CHANGE: renamed "maxWeaveDepth" parameter of weave function/method to "depth"
- API CHANGE: rearranged "weave" parameters (hopefully handier and more logical now--least likely to be used are now at the end)
- API CHANGE: replaced "weave" flags "wrapMethodWrappers" and "wrapBuiltIns" with "weaveTest", a function that overrides the normal function/method test when specified.
- Aspect.wrap can now wrap any callable object instead of just methods and functions (this fixed a few bugs where objects could be wrapped using wrap but not useing weave).
- Improved Aspect.wrap handling of class objects--it's not perfect yet, but at least classes can be wrapped.
- Added unweave function (weave is to unweave as wrap is to unwrap).
- Added a LOT more tests--weave (and unweave) are now tested more thoroughly.
- Changed aopythonexamples to use doctest (they're actually tests that run with aopythontest now, which is what I intended all along...yay!)
- Improved documentation.
- Changed whitespace to be more consistent with python style (PEP-0008). Keeping tabs and methodNameConvention for now.
1.0.1
- API CHANGE: Changed argument order of weave function: weave(aspects, object, includes...) seems more logical than weave(object, aspects, includes...) since Aspect now has a weave method with that argument order.
- Removed "dangerous method name" check from Aspect.wrap. Rare cases may exist in which those methods may need to be wrapped. Not to mention it's more consistent with the Zen of Python.
- Switched from using types.MethodType to __get__(instance, class) to create methods from functions. The tests seem to run a tiny bit faster, so it may improve performance (probably not).
- Minor code cleanup.
- Minor improvements to documentation.
- Reorganized the aopython module (moved internal functions to the bottom, etc.).
- Added __version__ and __all__ variables to aopython.
- Renamed unwrapDict to _unwrapDict.
1.0
- Initial release
| AOPython | /AOPython-1.0.3.zip/AOPython-1.0.3/README.txt | README.txt |
__version__ = "1.0.2"
from types import FunctionType
from inspect import getmembers, ismethod, isfunction, isclass
from weakref import WeakKeyDictionary
__all__ = ["Aspect", "weave", "iswrapped", "unwrap", "MethodHasNoAdviceError"]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# public interface
class Aspect(object):
"""Aspect base class. All aspects should extend this class.
Can advise user-defined functions, lambda functions, built-in functions,
unbound (class) methods, and bound (instance) methods."""
def advise(self, method, *args, **kwargs):
"""Override this method to provide advice to the method/function call
This method will normaly invoke the given method/function with the
supplied arguments and return the result. However, it is not required
to do either of those things.
All methods (n/a for functions) are unbound before being passed to
'advise', and the first item in args is the self argument of the
method. Note that the self argument in advise refers to the aspect
instance, not the instance of the wrapped method."""
return method(*args, **kwargs)
def wrap(self, method, allowUnwrap=False):
"""Return the given callable wrapped with the advice provided by this aspect
This function works with any callable object. The returned function/
method has the same __dict__ or a dictionary containing the same
__dict__ items as the unwrapped version if possible.
WARNING: the type of the returned callable may not match the type of
the original callable.
Arguments:
method - the callable to be wrapped
allowUnwrap - if set to True, the returned (wrapped) method can be
unwrapped at a later time. WARNING: the current implementation
uses a dictionary to map the wrapped method to the unwrapped
method, and the size of that dictionary may grow very large if
many methods are wrapped with this flag set to True.
Raises TypeError if the given method is not callable."""
if callable(method):
class_ = getattr(method, 'im_class', None)
self_ = getattr(method, 'im_self', None)
if self_:
unbound = method.im_func.__get__(None, type(self_))
else:
unbound = method
def _method(*args, **kwargs):
return self.advise(unbound, *args, **kwargs)
# Create a new function with the correct __name__ and __dict__
code = _method.func_code
globals_ = getattr(_method, "func_globals", None)
name_ = getattr(method, '__name__', None)
defaults = getattr(getattr(method, 'im_func', method), 'func_defaults', None)
closure = _method.func_closure
_method = FunctionType(code, globals_, name_, defaults, closure)
if isinstance(getattr(method, "__dict__", None), dict):
_method.__dict__ = method.__dict__
else:
try:
_method.__dict__ = dict(method.__dict__)
except:
pass
if allowUnwrap:
_unwrapDict[_method] = method
if class_:
return _method.__get__(self_, class_)
else:
return _method
else:
raise TypeError("cannot apply %s: %r of %s is not callable" % (type(self).__name__, method, type(method)))
def weave(self, obj, includes=(), excludes=(), depth=0, allowUnwrap=False,
wrapIfStartsWithUnderscore=False, weaveTest=None):
"""Convenience method to weave an object with this aspect.
See aopython.weave (a function in this module) for detailed usage
instructions. Note that aopython.weave can weave more than one aspect
in a single weave() call.
"""
weave(self, obj, includes, excludes, depth, allowUnwrap,
wrapIfStartsWithUnderscore, weaveTest)
def weave(aspect, obj, includes=(), excludes=(), depth=0, allowUnwrap=False,
wrapIfStartsWithUnderscore=False, weaveTest=None):
"""Advise each method or function belonging to obj
Arguments:
aspect - an Aspect instance or a sequence of Aspect instances with
which to weave obj. If this argument is a sequence, the order is
important: aspects will be weaved in the order they are listed, and
their advise methods will be called in the opposite order when a
wrapped method/function is called.
obj - an object whose methods/functions/classes will be wrapped.
If this is a class or an instance its methods will be wrapped.
If this is a module all of its functions will be wrapped.
includes - a sequence of method names that will be wrapped.
All methods will be included if this is an empty sequence (default).
excludes - a sequence of method names that will not be wrapped.
No methods will be excluded if this is an empty sequence (default).
Excludes override includes (e.g. if a name is is both includes and
excludes it will be excluded).
depth - the maximum number of levels to be woven. The default
behavior (depth=0) only wraps functions or methods belonging
directly to the object being weaved.
Example for weave(aspect, module, ...):
module.function # depth=0
module.Class # depth=0
module.Class.method # depth=1
Note that if this value is less than zero, the objects on level
zero will still be wrapped.
Note: setting this value to more than one (1) may result in
unpredictable results depending on the target object being wrapped.
allowUnwrap - see the allowUnwrap argument of Aspect.wrap
wrapIfStartsWithUnderscore - if this argument evaluates to True, all
functions and methods with names starting with an underscore will
be elligible to be wrapped. Otherwise they are excluded.
Defaults to False.
weaveTest - a function that tests the objects that are being weaved.
The function must take a single object argument and return True if
the object should be wrapped and False if the object should not be
wrapped. The result of this function overrides the normal test that
is applied to each object to determine if it is a method or
function. includes and excludes are processed after wrapTest.
Note: if this function returns True for a non-callable object an
exception will be raised when the object is wrapped.
includes and excludes may contain predicate functions that match
method names. Each predicate function must take a single string
argument and return True or False.
"""
if weaveTest is None:
def weaveTest(obj):
return ismethod(obj) or isfunction(obj)
weaveTest = _MethodNameMatchingAspect(includes, excludes, wrapIfStartsWithUnderscore).wrap(weaveTest)
if isinstance(aspect, Aspect):
aspects = (aspect,)
else:
aspects = aspect
def _weave(obj, weaveDepth):
methods = getmembers(obj, weaveTest)
weavables = [member for name, member in getmembers(obj)]
if weavables and weaveDepth < depth:
weaveDepth += 1
for weavable in weavables:
_weave(weavable, weaveDepth)
for methodName, method in methods:
for aspect in aspects:
method = aspect.wrap(method, allowUnwrap)
try:
setattr(obj, methodName, method)
except:
pass
_weave(obj, 0)
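# Illustrative usage sketch (not taken from the library docs; the aspect and
# target names below are invented for the example):
#
#     class CallCounter(Aspect):
#         def __init__(self):
#             self.calls = 0
#         def advise(self, method, *args, **kwargs):
#             self.calls += 1
#             return method(*args, **kwargs)
#
#     counter = CallCounter()
#     weave(counter, some_instance, excludes=('close',), allowUnwrap=True)
#     some_instance.do_work()   # advised call; counter.calls is incremented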
def iswrapped(method):
"""Return True if the given method can be unwrapped.
NOTICE: this function will only return True if the allowUnwrap flag was set
when the method was wrapped.
"""
return _unwrapDict.has_key(_getFunction(method))
def unwrap(method, all=False, quiet=False):
"""Unwrap the given method
NOTICE: this function will only work if the allowUnwrap flag was set when
the method was wrapped.
Arguments:
all - when True, unwrap all subsequently wrapped functions before
returning. In other words, always return a function that cannot be
unwrapped when True.
quiet - when False (default), raise a MethodHasNoAdviceError if the method
(or any subsequently wrapped methods if all=True) cannot be unwrapped.
When True, return the original method without raising an exception if
the method cannot be unwrapped.
Raises MethodHasNoAdviceError if quiet=False (default).
"""
try:
if not iswrapped(method): raise KeyError()
func = _unwrapDict[_getFunction(method)]
if all and iswrapped(func):
return unwrap(func, all, quiet)
else:
return func
except KeyError:
if not quiet:
raise MethodHasNoAdviceError('%s is not wrapped' % method)
return method
def unweave(obj, all=False, depth=0):
"""Unwrap all wrapped callables on the given object
This function does the opposite of weave without all the conditions
such as includes, excludes, and weaveTest.
Arguments:
all - see 'all' arg of 'unwrap'
depth - see 'depth' arg of 'weave'
"""
def _unweave(obj, weaveDepth):
for methodName, method in getmembers(obj, iswrapped):
method = unwrap(method, all)
setattr(obj, methodName, method)
weavables = [member for name, member in getmembers(obj)]
if weavables and weaveDepth < depth:
weaveDepth += 1
for weavable in weavables:
_unweave(weavable, weaveDepth)
_unweave(obj, 0)
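# Illustrative reversal sketch (invented names, same assumptions as above):
# unweave/unwrap only restore methods that were woven with allowUnwrap=True,
# otherwise unwrap raises MethodHasNoAdviceError (or returns the method
# unchanged when quiet=True).
#
#     weave(counter, some_instance, allowUnwrap=True)
#     assert iswrapped(some_instance.do_work)
#     unweave(some_instance)                      # original methods restored
#     unwrap(some_instance.do_work, quiet=True)   # no-op once already unwoven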
class MethodHasNoAdviceError(Exception):
"""Exception raised by unwrap when a method cannot be unwrapped"""
pass
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# internals
_unwrapDict = WeakKeyDictionary()
class _MethodNameMatchingAspect(Aspect):
"""Helper aspect used by weave to match methods by name"""
def __init__(self, includes, excludes, wrapIfStartsWithUnderscore=False):
def separate(litsAndPreds):
literals, predicates = [], []
# Separate the literal names from the predicate functions
for obj in litsAndPreds:
if callable(obj):
predicates.append(obj)
else:
literals.append(str(obj))
return (literals, predicates)
self.includes, self.includesFunc = separate(includes)
self.excludes, self.excludesFunc = separate(excludes)
if not includes:
# Include all methods
self.includesFunc.append(lambda n: True)
if not wrapIfStartsWithUnderscore:
# Exclude methods that start with underscore
self.excludesFunc.append(lambda n: n and n.startswith('_'))
def isMatch(self, methodName, literals, predicates):
# Simplest case first: check for matching literal in strings
if literals and methodName in literals:
return True
elif predicates:
# Try to match with a predicate function
for predicate in predicates:
if predicate(methodName):
return True
return False
def isIncluded(self, methodName):
return self.isMatch(methodName, self.includes, self.includesFunc)
def isNotExcluded(self, methodName):
return not self.isMatch(methodName, self.excludes, self.excludesFunc)
def advise(self, method, *args, **kwargs):
_name = getattr(args[0], "__name__", None)
return method(*args, **kwargs) and self.isIncluded(_name) and self.isNotExcluded(_name)
def _getFunction(method):
"""Helper function used by iswrapped and unwrap"""
func = method
while hasattr(func, 'im_func'): func = func.im_func
return func | AOPython | /AOPython-1.0.3.zip/AOPython-1.0.3/aopython.py | aopython.py |
import doctest
import re
import inspect
from aopython import Aspect, weave, iswrapped, unwrap, unweave
# Example aspects
class LoggerAspect(Aspect):
# A logger aspect that prints method invokation details
def __init__(self, verbose=False, hideAddresses=False):
self.verbose = verbose
self.hideAddresses = hideAddresses
self.memAddrRe = re.compile(r' at 0x[0123456789ABCDEF]+>')
def getArgStr(self, args, kwargs={}):
return ', '.join([self.repr_(arg) for arg in args] + ['%s=%s' % (key, repr(val)) for key, val in kwargs.items()])
def repr_(self, obj):
if self.hideAddresses:
# Remove memory addresses from representation
return self.memAddrRe.sub('>', repr(obj))
return repr(obj)
def printFunctionDetails(self, function, indent=1):
def attrRepr(obj, attrs, level=0):
results = []
for attr in attrs:
results.append(('\t' * (indent + level)) + attr + ' = ' + str(getattr(obj, attr, None)))
return '\n'.join(results)
try:
            print '\t%s' % inspect.formatargspec(*inspect.getargspec(function))
except:
pass
print '\t' * indent + 'repr =', self.repr_(function)
print '\t' * indent + 'dir =', dir(function)
print '\t' * indent + 'class =', getattr(function, 'im_class', type(function)).__name__
attrs = ('__class__', '__name__', '__module__', '__dict__', 'func_code')
print attrRepr(function, attrs)
if getattr(function, 'func_code', False):
attrs = ('co_name', 'co_argcount', 'co_nlocals', 'co_varnames', 'co_cellvars', 'co_freevars', 'co_consts', 'co_names', 'co_stacksize', 'co_flags')
print attrRepr(function.func_code, attrs, 1)
attrs = ('func_closure', 'func_defaults', 'func_globals', 'func_name') #, 'func_doc'
print attrRepr(function, attrs)
def advise(self, method, *args, **kwargs):
if getattr(method, 'im_class', False):
name = type(args[0]).__name__
elif hasattr(method, "__module__") and method.__module__ is not None:
name = method.__module__
else:
name = '?'
argStr = ', '.join([self.repr_(arg) for arg in args] + ['%s=%s' % (key, self.repr_(val)) for key, val in kwargs.items()])
call = '%s.%s(%s)' % (name, method.__name__, argStr)
print call
if self.verbose:
self.printFunctionDetails(method)
try:
rvalue = method(*args, **kwargs)
print 'exec %s -> %s' % (call, rvalue)
return rvalue
except Exception, e:
print 'exec %s -> %s: %s' % (call, type(e).__name__, e)
raise
else:
return method(*args, **kwargs)
class FunctionResultLoggerAspect(LoggerAspect):
# A logger aspect that can be used to wrap __getattribute__
def advise(self, method, *args, **kwargs):
rvalue = method(*args, **kwargs)
print '%s returned %s' % (method.__name__, rvalue)
if self.verbose:
self.printFunctionDetails(rvalue)
return rvalue
class TestAspect(Aspect):
# Test aspect prints method signature before and after method execution
def advise(self, method, *args, **kwargs):
print 'TestAspect.advise checking attribute %s.exposed = %s' % (method.__name__, getattr(method, 'exposed', '<undefined>'))
return method(*args, **kwargs)
class TestAspect2(Aspect):
def advise(self, method, *args, **kwargs):
newArgs = args[0:-1] + ('TestAspect2 replaced arg',)
try:
print "TestAspect2.advise change last arg from %s to %s" % (repr(args[-1]), repr(newArgs[-1]))
return method(*newArgs, **kwargs)
except Exception, e:
print "TestAspect2.advise fall back to original args due to exception: %s: %s" % (type(e).__name__, e)
return method(*args, **kwargs)
class Test(object):
def __repr__(self):
return '<Test object>'
def __str__(self):
return 'Test'
def func(self, x=1):
return x
def func2(self, x):
return '%s + 100' % x
def func3(self, x):
return x
def _func(self, x):
raise Exception('_func should not be wrapped')
func.exposed = True
func2.exposed = False
def _test():
import doctest, aopythonexamples
return doctest.testmod(aopythonexamples)
if __name__ == "__main__":
_test() | AOPython | /AOPython-1.0.3.zip/AOPython-1.0.3/aopythonexamples.py | aopythonexamples.py |
GANGNAM = 0
GANGDONG = 1
GANGBOOK = 2
GANGSEO = 3
GWANAK = 4
GWANJIN = 5
GURO = 6
GUMCHEON = 7
NOWON = 8
DOBONG = 9
DONGDAEMOON = 10
DONGJAK = 11
MAPO = 12
SEODAEMOON = 13
SEOCHO = 14
SEONGDONG = 15
SEONGBOOK = 16
SONGPA = 17
YANGCHEON = 18
YOUNGDEUNGPO = 19
YONGSAN = 20
EUNPYONG = 21
JONGRO = 22
JOONG = 23
JOONGRANG = 24
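# Note (added for clarity): the index constants above follow the same order as the
# rows appended to `gu` below, so e.g. gu[GANGNAM][0] == '강남구' and
# gu[GANGNAM][1:] are the dong names of that district.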
gu = []
gu.append(('강남구', '개포동', '논현동', '대치동', '도곡동', '삼성동', '세곡동', '수서동', '신사동', '압구정동', '역삼동', '율현동', '일원동', '자곡동', '청담동'))
gu.append(('강동구', '강일동', '고덕동', '길동', '둔촌동', '명일동', '상일동', '성내동', '암사동', '천호동'))
gu.append(('강북구', '미아동', '번동', '수유동', '우이동'))
gu.append(('강서구', '가양동', '개화동', '공항동', '과해동', '내발산동', '등촌동', '마곡동', '방화동', '염창동', '오곡동', '오쇠동', '외발산동', '화곡동'))
gu.append(('관악구', '남현동', '봉천동', '신림동'))
gu.append(('광진구', '광장동', '구의동', '군자동', '능동', '자양동', '중곡동', '화양동'))
gu.append(('구로구', '가리봉동', '개봉동', '고척동', '구로동', '궁동', '신도림동', '오류동', '온수동', '천왕동', '항동'))
gu.append(('금천구', '가산동', '독산동', '시흥동'))
gu.append(('노원구', '공릉동', '상계동', '월계동', '중계동', '하계동'))
gu.append(('도봉구', '도봉동', '방학동', '쌍문동', '창동'))
gu.append(('동대문구', '답십리동', '신설동', '용두동', '이문동', '장안동', '전농동', '제기동', '청량리동', '회기동', '휘경동'))
gu.append(('동작구', '노량진동', '대방동', '동작동', '본동', '사당동', '상도동', '신대방동', '흑석동'))
gu.append(('마포구', '공덕동', '구수동', '노고산동', '당인동', '대흥동', '도화동', '동교동', '마포동', '망원동', '상수동', '상암동', '서교동', '성산동', '신공덕동', '신수동', '신정동', '아현동', '연남동', '염리동', '용강동', '중동', '창전동', '토정동', '하중동', '합정동', '현석동'))
gu.append(('서대문구', '남가좌동', '냉천동', '대신동', '대현동', '미근동', '봉원동', '북가좌동', '북아현동', '신촌동', '연희동', '영천동', '옥천동', '창천동', '천연동', '합동', '현저동', '홍은동', '홍제동', '충정로'))
gu.append(('서초구', '내곡동', '반포동', '방배동', '서초동', '신원동', '양재동', '염곡동', '우면동', '원지동', '잠원동'))
gu.append(('성동구','금호동', '도선동', '마장동', '사근동', '상왕십리동', '성수동', '송정동', '옥수동', '용답동', '응봉동', '하왕십리동', '행당동', '홍익동'))
gu.append(('성북구', '길음동', '돈암동', '동선동', '동소문동', '보문동', '삼선동', '상월곡동', '석관동', '성북동', '안암동', '장위동', '정릉동', '종암동', '하월곡동'))
gu.append(('송파구', '가락동', '거여동', '마천동', '문정동', '방이동', '삼전동', '석촌동', '송파동', '신천동', '오금동', '잠실동', '장지동', '풍납동'))
gu.append(('양천구', '목동', '신월동', '신정동'))
gu.append(('영등포구', '당산동', '대림동', '도림동', '문래동', '신길동', '양평동', '양화동', '여의도동', '영등포동'))
gu.append(('용산구', '갈월동', '남영동', '도원동', '동빙고동', '동자동', '문배동', '보광동', '산천동', '서계동', '서빙고동', '신계동', '신창동', '용문동', '용산동', '원효로', '이촌동', '이태원동', '주성동', '청암동', '청파동', '한강로', '한남동', '효창동', '후암동'))
gu.append(('은평구', '갈현동', '구산동', '녹번동', '대조동', '불광동', '수색동', '신사동', '역촌동', '응암동', '증산동', '진관동'))
gu.append(('종로구', '가회동', '견지동', '경운동', '계동', '공평동', '관수동', '관철동', '관훈동', '교남동', '교북동', '구기동', '궁정동', '권농동', '낙원동', '내수동', '내자동', '누상동', '누하동', '당주동', '도렴동', '돈의동', '동숭동', '명륜동', '묘동', '무악동', '봉익동', '부암동', '사간동', '사직동', '삼청동', '서린동', '세종로', '소격동', '송월동', '송현동', '수송동', '숭인동', '신교동', '신문로', '신영동', '안국동', '연건동', '연지동', '예지동', '옥인동', '와룡동', '운니동', '원남동', '원서동', '이화동', '익선동', '인사동', '인의동', '장사동', '재동', '적선동', '종로', '중학동', '창성동', '창신동', '청운동', '청진동', '체부동', '충신동', '통의동', '통인동', '팔판동', '평동', '평창동', '필운동', '행촌동', '혜화동', '홍지동', '홍파동', '화동', '효자동', '효제동', '훈정동'))
gu.append(('중구', '광희동', '남대문로', '남산동', '남창동', '남학동', '다동', '만리동', '명동', '무교동', '무학동', '묵정동', '방산동', '봉래동', '북창동', '산림동', '삼각동', '서소문동', '소공동', '수표동', '수하동', '순화동', '신당동', '쌍림동', '예관동', '예장동', '오장동', '을지로', '의주로', '인현동', '입정동', '장교동', '장충동', '저동', '정동', '주교동', '주자동', '중림동', '초동', '충무로', '태평로', '필동', '황학동', '화현동', '흥인동', '충정로1가'))
gu.append(('중랑구', '망우동', '면목동', '묵동', '상봉동', '신내동', '중화동')) | APA | /APA-1.0.0.tar.gz/APA-1.0.0/location.py | location.py |
import tweepy
import datetime
import MySQLdb as mdb
import location as loc
from threading import Thread, BoundedSemaphore
import urllib
from xml.dom import minidom
################# TWITTER AUTHENTIFICATION KEYS #################
CONSUMER_KEY = 'g5xQx67GQgzJCs4aNam1Q'
CONSUMER_SECRET = '4T5y74q6MATeiA13pktZJiyvSSCAu4cjLUesnJLg'
ACCESS_TOKEN = '1440443094-X7g4mB05cCU5KT5gaaHkj3sqaWkbcJRDHbs0tKZ'
ACCESS_SECRET = 'LBAIA50pmrahav4grydp7r2ylvRgy2Pa5y6xgNK0zrdzC'
############ DO NOT REVEAL ABOVE KEYS TO OTHER PERSON ###########
########## Setting OAuth Handler ##########
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
api = tweepy.API(auth)
########## ETC global variables ###########
today = datetime.date.today()
con = mdb.connect('localhost', 'root', 'dhepd','test')
sema = BoundedSemaphore(value=1)
class finder(Thread):
def __init__(self, keyword):
Thread.__init__(self)
self.keyword = keyword
def run(self):
sema.acquire()
########## START CRITICAL SECTION ##########
for tweet in tweepy.Cursor(api.search, q=self.keyword, rpp=100, lang='ko').items():
with con:
                if (today - tweet.created_at.date()).days > 2:
break
else:
cur = con.cursor()
tid = tweet.user.id
tname = tweet.user.name.encode('utf-8')
text = tweet.text.encode('utf-8')
location = self.GetLocation(text)
tp = ''
if (self.keyword=='교통사고 poltra -RT'):
tp = '교통사고'
if (self.keyword=='화재 발생 -RT'):
tp = '화재'
if (location!=False):
geo = self.GetGeo(location)
lat = geo[0]
lng = geo[1]
cur.execute("INSERT INTO test VALUES(%s, %s, %s, %s, %s, %s, %s, %s)", (tid, tname, location, lat, lng, tp, text, tweet.created_at))
########## END CRITICAL SECTION ###########
sema.release()
def GetLocation(self, text):
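        # Return the first Seoul district or neighborhood name found in the text, or False if none matches.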
for i in range(0, 25):
for j in range (0, len(loc.gu[i])):
if (text.count(loc.gu[i][j])>=1):
return loc.gu[i][j]
return False
def GetGeo(self, location):
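        # Resolve a place name to (latitude, longitude) using the Daum local geocoding API.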
url = 'http://apis.daum.net/local/geo/addr2coord?apikey=6acc30183ab818fa90193ab6154616892a68508f&q='+location+'&output=xml'
dom = minidom.parse(urllib.urlopen(url))
items = dom.getElementsByTagName("item")
lat = items[0].getElementsByTagName("lat")
lng = items[0].getElementsByTagName('lng')
return (lat[0].firstChild.data,lng[0].firstChild.data)
find = finder('교통사고 poltra -RT')
find2 = finder('화재 발생 -RT')
find.start()
find2.start() | APA | /APA-1.0.0.tar.gz/APA-1.0.0/twit.py | twit.py |
# TODO: The maintainer of this repo has not yet edited this file
**REPO OWNER**: Do you want Customer Service & Support (CSS) support for this product/project?
- **No CSS support:** Fill out this template with information about how to file issues and get help.
- **Yes CSS support:** Fill out an intake form at [aka.ms/onboardsupport](https://aka.ms/onboardsupport). CSS will work with/help you to determine next steps.
- **Not sure?** Fill out an intake as though the answer were "Yes". CSS will help you decide.
*Then remove this first heading from this SUPPORT.MD file before publishing your repo.*
# Support
## How to file issues and get help
This project uses GitHub Issues to track bugs and feature requests. Please search the existing
issues before filing new issues to avoid duplicates. For new issues, file your bug or
feature request as a new Issue.
For help and questions about using this project, please **REPO MAINTAINER: INSERT INSTRUCTIONS HERE
FOR HOW TO ENGAGE REPO OWNERS OR COMMUNITY FOR HELP. COULD BE A STACK OVERFLOW TAG OR OTHER
CHANNEL. WHERE WILL YOU HELP PEOPLE?**.
## Microsoft Support Policy
Support for this **PROJECT or PRODUCT** is limited to the resources listed above.
| APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/SUPPORT.md | SUPPORT.md |
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.8 BLOCK -->
## Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd).
<!-- END MICROSOFT SECURITY.MD BLOCK -->
| APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/SECURITY.md | SECURITY.md |
# TorchScale - A Library for Transformers at (Any) Scale
<p>
<a href="https://github.com/microsoft/torchscale/blob/main/LICENSE"><img alt="MIT License" src="https://img.shields.io/badge/license-MIT-blue.svg" /></a>
<a href="https://pypi.org/project/torchscale"><img alt="MIT License" src="https://badge.fury.io/py/torchscale.svg" /></a>
</p>
TorchScale is a PyTorch library that allows researchers and developers to scale up Transformers efficiently and effectively.
It implements fundamental research that improves modeling generality and capability, as well as the training stability and efficiency of scaling Transformers.
- Stability - [**DeepNet**](https://arxiv.org/abs/2203.00555): scaling Transformers to 1,000 Layers and beyond
- Generality - [**Foundation Transformers (Magneto)**](https://arxiv.org/abs/2210.06423): towards true general-purpose modeling across tasks and modalities (including language, vision, speech, and multimodal)
- Capability - A [**Length-Extrapolatable**](https://arxiv.org/abs/2212.10554) Transformer
- Efficiency - [**X-MoE**](https://arxiv.org/abs/2204.09179): scalable & finetunable sparse Mixture-of-Experts (MoE)
## News
- November, 2022: TorchScale 0.1.1 released [[Paper](https://arxiv.org/abs/2211.13184)] [[PyPI](https://pypi.org/project/torchscale/)]
## Installation
To install:
```
pip install torchscale
```
Alternatively, you can develop it locally:
```
git clone https://github.com/microsoft/torchscale.git
cd torchscale
pip install -e .
```
## Getting Started
It takes only a few lines of code to create a model with the fundamental research features above enabled. Here is how to quickly obtain a BERT-like encoder:
```python
>>> from torchscale.architecture.config import EncoderConfig
>>> from torchscale.architecture.encoder import Encoder
>>> config = EncoderConfig(vocab_size=64000)
>>> model = Encoder(config)
>>> print(model)
```
We also support the `Decoder` architecture and the `EncoderDecoder` architecture:
```python
# Creating a decoder model
>>> from torchscale.architecture.config import DecoderConfig
>>> from torchscale.architecture.decoder import Decoder
>>> config = DecoderConfig(vocab_size=64000)
>>> decoder = Decoder(config)
>>> print(decoder)
# Creating an encoder-decoder model
>>> from torchscale.architecture.config import EncoderDecoderConfig
>>> from torchscale.architecture.encoder_decoder import EncoderDecoder
>>> config = EncoderDecoderConfig(vocab_size=64000)
>>> encdec = EncoderDecoder(config)
>>> print(encdec)
```
## Key Features
- [DeepNorm to improve the training stability of Post-LayerNorm Transformers](https://arxiv.org/abs/2203.00555)
* enabled by setting *deepnorm=True* in the `Config` class.
* It adjusts both the residual connection and the initialization method according to the model architecture (i.e., encoder, decoder, or encoder-decoder).
- [SubLN for the model generality and the training stability](https://arxiv.org/abs/2210.06423)
* enabled by *subln=True*. This is enabled by default.
* It introduces another LayerNorm to each sublayer and adjusts the initialization according to the model architecture.
* Note that SubLN and DeepNorm cannot be used in one single model.
- [X-MoE: efficient and finetunable sparse MoE modeling](https://arxiv.org/abs/2204.09179)
* enabled by *use_xmoe=True*.
  * It replaces every *moe_freq*-th `FeedForwardNetwork` layer with an X-MoE layer.
- [Multiway architecture for multimodality](https://arxiv.org/abs/2208.10442)
* enabled by *multiway=True*.
  * It provides a pool of Transformer parameters used for different modalities.
- [Extrapolatable position embedding (Xpos)](https://arxiv.org/abs/2212.10554)
* enabled by *xpos_rel_pos=True*.
- [Relative position bias](https://arxiv.org/abs/1910.10683)
* enabled by adjusting *rel_pos_buckets* and *max_rel_pos*.
- [SparseClip: improving the gradient clipping for sparse MoE models](https://arxiv.org/abs/2211.13184)
* we provide a [sample code](examples/fairseq/utils/sparse_clip.py) that can be easily adapted to the FairSeq (or other) repo.
Most of the features above can be used by simply passing the corresponding parameters to the config. For example:
```python
>>> from torchscale.architecture.config import EncoderConfig
>>> from torchscale.architecture.encoder import Encoder
>>> config = EncoderConfig(vocab_size=64000, deepnorm=True, multiway=True)
>>> model = Encoder(config)
>>> print(model)
```
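
As a further sketch, the sparse MoE layers and Xpos can be combined on a decoder in the same way. The *use_xmoe*, *moe_freq*, and *xpos_rel_pos* flags are the ones listed above; *moe_expert_count* mirrors the option of the same name in the FairSeq examples below and is assumed here to be a config field as well:

```python
>>> from torchscale.architecture.config import DecoderConfig
>>> from torchscale.architecture.decoder import Decoder
>>> config = DecoderConfig(vocab_size=64000, use_xmoe=True, moe_freq=2,
...                        moe_expert_count=16, xpos_rel_pos=True)
>>> decoder = Decoder(config)
>>> print(decoder)
```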
## Examples
We provide examples of how to use TorchScale in the following scenarios/tasks:
- Language
* [Decoder/GPT](examples/fairseq/README.md#example-gpt-pretraining)
* [Encoder-Decoder/Neural Machine Translation](examples/fairseq/README.md#example-machine-translation)
* [Encoder/BERT](examples/fairseq/README.md#example-bert-pretraining)
- Vision
* ViT/BEiT [In progress]
- Speech
- Multimodal
* [Multiway Transformers/BEiT-3](https://github.com/microsoft/unilm/tree/master/beit3)
We plan to provide more examples regarding different tasks (e.g. vision pretraining and speech recognition) and various deep learning toolkits (e.g. [DeepSpeed](https://github.com/microsoft/DeepSpeed) and [Megatron-LM](https://github.com/NVIDIA/Megatron-LM)). Any comments or PRs are welcome!
## Results
### Stability Evaluation
<p align="center">
<img src="https://publicmodel.blob.core.windows.net/torchscale/pic/convergence.png" width="800"/>
</p>
With TorchScale, the training curve is smooth, while the baseline Transformer fails to converge.
### Scaling-up Experiments
<p align="center">
<img src="https://publicmodel.blob.core.windows.net/torchscale/pic/scaling_curve.png" width="800"/>
</p>
TorchScale supports arbitrary depths and widths, scaling up the models successfully and painlessly.
## Acknowledgments
Some implementations in TorchScale are either adapted from or inspired by the [FairSeq](https://github.com/facebookresearch/fairseq) repository and the [UniLM](https://github.com/microsoft/unilm) repository.
## Citations
If you find this repository useful, please consider citing our work:
```
@article{torchscale,
author = {Shuming Ma and Hongyu Wang and Shaohan Huang and Wenhui Wang and Zewen Chi and Li Dong and Alon Benhaim and Barun Patra and Vishrav Chaudhary and Xia Song and Furu Wei},
title = {{TorchScale}: {Transformers} at Scale},
journal = {CoRR},
volume = {abs/2211.13184},
year = {2022}
}
```
```
@article{deepnet,
author = {Hongyu Wang and Shuming Ma and Li Dong and Shaohan Huang and Dongdong Zhang and Furu Wei},
title = {{DeepNet}: Scaling {Transformers} to 1,000 Layers},
journal = {CoRR},
volume = {abs/2203.00555},
year = {2022},
}
```
```
@article{magneto,
author = {Hongyu Wang and Shuming Ma and Shaohan Huang and Li Dong and Wenhui Wang and Zhiliang Peng and Yu Wu and Payal Bajaj and Saksham Singhal and Alon Benhaim and Barun Patra and Zhun Liu and Vishrav Chaudhary and Xia Song and Furu Wei},
title = {Foundation {Transformers}},
journal = {CoRR},
volume = {abs/2210.06423},
year = {2022}
}
```
```
@inproceedings{xmoe,
title={On the Representation Collapse of Sparse Mixture of Experts},
author={Zewen Chi and Li Dong and Shaohan Huang and Damai Dai and Shuming Ma and Barun Patra and Saksham Singhal and Payal Bajaj and Xia Song and Xian-Ling Mao and Heyan Huang and Furu Wei},
booktitle={Advances in Neural Information Processing Systems},
year={2022},
url={https://openreview.net/forum?id=mWaYC6CZf5}
}
```
## Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
When you submit a pull request, a CLA bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [Furu Wei](mailto:fuwei@microsoft.com) and [Shuming Ma](mailto:shumma@microsoft.com) with any additional questions or comments.
## Trademarks
This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
trademarks or logos is subject to and must follow
[Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
Any use of third-party trademarks or logos are subject to those third-party's policies.
| APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/README.md | README.md |
# Example: Integration with FairSeq
## Setup
```bash
# Install the repo as a package:
git clone https://github.com/microsoft/torchscale.git
cd torchscale
pip install -e .
pip install git+https://github.com/shumingma/fairseq.git@moe
pip install git+https://github.com/shumingma/infinibatch.git
pip install iopath
pip install numpy==1.23.0
```
## Example: BERT Pretraining
### Data Format
We use a [streaming dataloader](https://github.com/microsoft/infinibatch) to read the data on-the-fly from the disk. It requires the data to be sharded into multiple small files (e.g. 10K lines per file), as well as a JSON file that contains some metadata and the paths to these files.
The overall data directory should be organized as follows:
```
Data/
├── json/
│ ├── train.json
│ └── valid.json
├── shard/
│ ├── train/
│ │ ├── 00000.txt
│ │ ├── 00001.txt
│ │ └── ...
│ └── valid/
│ ├── 00000.txt
│ ├── 00001.txt
│ └── ...
├── dict.txt
└── sentencepiece.bpe.model
```
We recommend that each sharded data file contain no more than 10K lines, with one sentence per line; two documents should be separated by an empty line.
```
Document 1 Line 1
Document 1 Line 2
Document 1 Line 3
Document 2 Line 1
Document 2 Line 2
...
```
Also, the JSON file should be in the format like this:
```
[
{
"source": [
"shard/train/00000.txt",
"shard/train/00001.txt",
...
],
"source_lang": "en",
"weight": 1.0
}
]
```
You can quickly get started with our processed vocabulary files: [sentencepiece.bpe.model](https://publicmodel.blob.core.windows.net/torchscale/vocab/sentencepiece.bpe.model) and [dict.txt](https://publicmodel.blob.core.windows.net/torchscale/vocab/dict.txt). Note that this vocabulary is English-only with 64K tokens. To train a new `sentencepiece.bpe.model` on your own data, please refer to the [SentencePiece](https://github.com/google/sentencepiece) repo. With the sentencepiece model and the `sentencepiece` library installed, you can extract the `dict.txt` file from it by
```
spm_export_vocab --model=sentencepiece.bpe.model | sed 's/\t/ /g' | tail -n +4 > dict.txt
```
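
If you need to produce the sharded layout above from a plain one-sentence-per-line corpus, a minimal sketch is shown below. It is not part of the repo; the input path, output directory, shard size, and the `source_lang`/`weight` values are placeholders following the JSON example above, and a production version should also avoid splitting a document (an empty-line-separated block) across two shards.

```python
import json
import os


def shard_corpus(corpus_path, out_dir, lines_per_shard=10000):
    # Write 10K-line shards under shard/train/ and the matching json/train.json file.
    os.makedirs(os.path.join(out_dir, "shard", "train"), exist_ok=True)
    os.makedirs(os.path.join(out_dir, "json"), exist_ok=True)

    with open(corpus_path, "r", encoding="utf-8") as f:
        lines = f.readlines()

    shard_paths = []
    for shard_id, start in enumerate(range(0, len(lines), lines_per_shard)):
        rel_path = "shard/train/{:05d}.txt".format(shard_id)
        with open(os.path.join(out_dir, rel_path), "w", encoding="utf-8") as f:
            f.writelines(lines[start:start + lines_per_shard])
        shard_paths.append(rel_path)

    meta = [{"source": shard_paths, "source_lang": "en", "weight": 1.0}]
    with open(os.path.join(out_dir, "json", "train.json"), "w", encoding="utf-8") as f:
        json.dump(meta, f, indent=2)


shard_corpus("corpus.train.txt", "Data")
```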
### Dense Model
```bash
cd examples/fairseq/
python -m torch.distributed.launch --nproc_per_node=8 --nnodes=8 train.py ${PATH_TO_DATA} \
--task pretraining \
--tokens-per-sample 512 \
--mask-prob 0.15 \
--span-length 3.0 \
--leave-unmasked-prob 0.0 \
--random-token-prob 0.0 \
--criterion masked_lm \
--arch mlm_base \
--share-encoder-input-output-embed \
--required-batch-size-multiple 8 \
--spm-model ${PATH_TO_DATA}/sentencepiece.bpe.model \
--dict-file ${PATH_TO_DATA}/dict.txt \
--optimizer adam \
--adam-betas '(0.9,0.98)' \
--adam-eps 1e-6 \
--clip-norm 2.0 \
--lr-scheduler polynomial_decay \
--lr 0.0005 \
--warmup-updates 10000 \
--total-num-update 125000 \
--max-update 125000 \
--max-sentences 32 \
--update-freq 1 \
--log-format simple \
--log-interval 100 \
--disable-validation \
--save-interval-updates 5000 \
--no-epoch-checkpoints \
--fp16 \
--fp16-init-scale 4 \
--fp16-scale-window 256 \
--min-loss-scale 0.0001 \
--seed 1 \
--save-dir ${PATH_TO_CKPT} \
--ddp-backend=no_c10d \
--distributed-no-spawn \
--reset-dataloader \
--batch-read-ahead 10000 \
--rel-pos-buckets 32 \
--max-rel-pos 128 \
--deepnorm
```
### Sparse (MoE) Model
```bash
cd examples/fairseq/
python -m torch.distributed.launch --nproc_per_node=8 --nnodes=8 train.py ${PATH_TO_DATA} \
--task pretraining \
--tokens-per-sample 512 \
--mask-prob 0.15 \
--span-length 3.0 \
--leave-unmasked-prob 0.0 \
--random-token-prob 0.0 \
--arch mlm_base \
--share-encoder-input-output-embed \
--required-batch-size-multiple 8 \
--spm-model ${PATH_TO_DATA}/sentencepiece.bpe.model \
--dict-file ${PATH_TO_DATA}/dict.txt \
--optimizer adam \
--adam-betas '(0.9,0.98)' \
--adam-eps 1e-6 \
--clip-norm 2.0 \
--lr-scheduler polynomial_decay \
--lr 0.0005 \
--warmup-updates 10000 \
--total-num-update 125000 \
--max-update 125000 \
--max-sentences 32 \
--update-freq 1 \
--log-format simple \
--log-interval 100 \
--disable-validation \
--save-interval-updates 5000 \
--no-epoch-checkpoints \
--fp16 \
--fp16-init-scale 4 \
--fp16-scale-window 256 \
--min-loss-scale 0.0001 \
--seed 1 \
--save-dir ${PATH_TO_CKPT} \
--ddp-backend=no_c10d \
--distributed-no-spawn \
--reset-dataloader \
--batch-read-ahead 10000 \
--rel-pos-buckets 32 \
--max-rel-pos 128 \
--deepnorm \
--moe-expert-count 64 --moe-freq 2 \
--moe-gating-use-fp32 --moe-second-expert-policy random --moe-normalize-gate-prob-before-dropping \
--moe-eval-capacity-token-fraction -1.0 \
--criterion masked_lm_moe_cross_entropy --moe-gate-loss-wt 0.01 --moe-gate-loss-combine-method sum \
--use-xmoe --pad-to-max-length
```
## Example: GPT Pretraining
### Data Format
We use the format as in the FairSeq's [language modeling example](https://github.com/facebookresearch/fairseq/tree/main/examples/language_model#1-preprocess-the-data).
### Dense Model
```bash
cd examples/fairseq/
python -m torch.distributed.launch --nproc_per_node=2 --nnodes=1 train.py \
${PATH_TO_DATA} \
--num-workers 2 \
--activation-fn gelu \
--share-decoder-input-output-embed \
--validate-interval-updates 1000 \
--save-interval-updates 1000 \
--no-epoch-checkpoints \
--memory-efficient-fp16 \
--fp16-init-scale 4 \
--arch lm_base \
--task language_modeling \
--sample-break-mode none \
--tokens-per-sample 128 \
--optimizer adam --adam-betas "(0.9, 0.98)" \
--adam-eps 1e-08 \
--clip-norm 0.0 \
--lr 5e-4 \
--lr-scheduler polynomial_decay \
--warmup-updates 750 \
--dropout 0.1 \
--attention-dropout 0.1 \
--weight-decay 0.01 \
--batch-size 4 \
--update-freq 1 \
--required-batch-size-multiple 1 \
--total-num-update 50000 \
--max-update 50000 \
--seed 1 \
--ddp-backend=c10d
```
### Sparse (MoE) Model
```bash
cd examples/fairseq/
python -m torch.distributed.launch --nproc_per_node=2 --nnodes=1 train.py \
${PATH_TO_DATA} \
--num-workers 2 \
--activation-fn gelu \
--share-decoder-input-output-embed \
--validate-interval-updates 1000 \
--save-interval-updates 1000 \
--no-epoch-checkpoints \
--memory-efficient-fp16 \
--fp16-init-scale 4 \
--arch lm_base \
--task language_modeling \
--sample-break-mode none \
--tokens-per-sample 128 \
--optimizer adam --adam-betas "(0.9, 0.98)" \
--adam-eps 1e-08 \
--clip-norm 0.0 \
--lr 5e-4 \
--lr-scheduler polynomial_decay \
--warmup-updates 750 \
--dropout 0.1 \
--attention-dropout 0.1 \
--weight-decay 0.01 \
--batch-size 4 \
--update-freq 1 \
--required-batch-size-multiple 1 \
--total-num-update 50000 \
--max-update 50000 \
--seed 1 \
--ddp-backend=no_c10d \
--moe-expert-count 2 --moe-freq 2 \
--moe-gating-use-fp32 --moe-second-expert-policy random --moe-normalize-gate-prob-before-dropping \
--moe-eval-capacity-token-fraction -1.0 \
--criterion moe_cross_entropy --moe-gate-loss-wt 0.01 --moe-gate-loss-combine-method sum \
--use-xmoe
```
## Example: Machine Translation
### Data Format
We follow the FairSeq's [neural machine translation example](https://github.com/facebookresearch/fairseq/tree/main/examples/translation#training-a-new-model) to preprocess the data.
### Dense Model
```bash
cd examples/fairseq/
python -m torch.distributed.launch --nproc_per_node=2 --nnodes=1 train.py \
${PATH_TO_DATA} \
--arch mt_base --share-decoder-input-output-embed \
--optimizer adam --adam-betas '(0.9, 0.98)' --clip-norm 0.0 \
--lr 5e-4 --lr-scheduler inverse_sqrt --warmup-updates 4000 \
--dropout 0.3 --weight-decay 0.0001 \
--max-tokens 4096 --fp16
```
### Sparse (MoE) Model
```bash
cd examples/fairseq/
python -m torch.distributed.launch --nproc_per_node=2 --nnodes=1 train.py \
${PATH_TO_DATA} \
--arch mt_base --share-decoder-input-output-embed \
--optimizer adam --adam-betas '(0.9, 0.98)' --clip-norm 0.0 \
--lr 5e-4 --lr-scheduler inverse_sqrt --warmup-updates 4000 \
--dropout 0.3 --weight-decay 0.0001 \
--moe-expert-count 2 --moe-freq 2 \
--moe-gating-use-fp32 --moe-second-expert-policy random --moe-normalize-gate-prob-before-dropping \
--moe-eval-capacity-token-fraction -1.0 \
--criterion moe_cross_entropy --moe-gate-loss-wt 0.01 --moe-gate-loss-combine-method sum \
--use-xmoe \
--max-tokens 4096 --fp16
```
| APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/README.md | README.md |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
from typing import Dict, List, Optional, Tuple
import torch
from fairseq import distributed_utils, utils
from fairseq.distributed import fsdp_wrap
from fairseq.models import (
FairseqEncoder,
FairseqEncoderDecoderModel,
register_model,
register_model_architecture,
)
from fairseq.models.transformer import Embedding
from fairseq.modules import PositionalEmbedding
from torch import Tensor
from torchscale.architecture.config import DecoderConfig, EncoderConfig
from torchscale.architecture.encoder import Encoder
from .language_modeling import LMDecoder as MTDecoder
logger = logging.getLogger(__name__)
DEFAULT_MAX_SOURCE_POSITIONS = 1024
DEFAULT_MAX_TARGET_POSITIONS = 1024
DEFAULT_MIN_PARAMS_TO_WRAP = int(1e8)
@register_model("mt")
class TranslationModel(FairseqEncoderDecoderModel):
def __init__(self, args, encoder, decoder):
super().__init__(encoder, decoder)
self.args = args
@staticmethod
def add_args(parser):
"""Add model-specific arguments to the parser."""
# fmt: off
parser.add_argument('--activation-fn',
choices=utils.get_available_activation_fns(),
help='activation function to use')
parser.add_argument('--dropout', type=float, metavar='D',
help='dropout probability')
parser.add_argument('--attention-dropout', type=float, metavar='D',
help='dropout probability for attention weights')
parser.add_argument('--activation-dropout', '--relu-dropout', type=float, metavar='D',
help='dropout probability after activation in FFN.')
parser.add_argument('--encoder-embed-path', type=str, metavar='STR',
help='path to pre-trained encoder embedding')
parser.add_argument('--encoder-embed-dim', type=int, metavar='N',
help='encoder embedding dimension')
parser.add_argument('--encoder-ffn-embed-dim', type=int, metavar='N',
help='encoder embedding dimension for FFN')
parser.add_argument('--encoder-layers', type=int, metavar='N',
help='num encoder layers')
parser.add_argument('--encoder-attention-heads', type=int, metavar='N',
help='num encoder attention heads')
parser.add_argument('--encoder-normalize-before', action='store_true',
help='apply layernorm before each encoder block')
parser.add_argument('--encoder-learned-pos', action='store_true',
help='use learned positional embeddings in the encoder')
parser.add_argument('--decoder-embed-path', type=str, metavar='STR',
help='path to pre-trained decoder embedding')
parser.add_argument('--decoder-embed-dim', type=int, metavar='N',
help='decoder embedding dimension')
parser.add_argument('--decoder-ffn-embed-dim', type=int, metavar='N',
help='decoder embedding dimension for FFN')
parser.add_argument('--decoder-layers', type=int, metavar='N',
help='num decoder layers')
parser.add_argument('--decoder-attention-heads', type=int, metavar='N',
help='num decoder attention heads')
parser.add_argument('--decoder-learned-pos', action='store_true',
help='use learned positional embeddings in the decoder')
parser.add_argument('--decoder-normalize-before', action='store_true',
help='apply layernorm before each decoder block')
parser.add_argument('--decoder-output-dim', type=int, metavar='N',
help='decoder output dimension (extra linear layer '
                                 'if different from decoder embed dim)')
parser.add_argument('--share-decoder-input-output-embed', action='store_true',
help='share decoder input and output embeddings')
parser.add_argument('--share-all-embeddings', action='store_true',
help='share encoder, decoder and output embeddings'
' (requires shared dictionary and embed dim)')
parser.add_argument('--no-token-positional-embeddings', default=False, action='store_true',
help='if set, disables positional embeddings (outside self attention)')
parser.add_argument('--adaptive-softmax-cutoff', metavar='EXPR',
help='comma separated list of adaptive softmax cutoff points. '
'Must be used with adaptive_loss criterion'),
parser.add_argument('--adaptive-softmax-dropout', type=float, metavar='D',
help='sets adaptive softmax dropout for the tail projections')
parser.add_argument('--layernorm-embedding', action='store_true',
help='add layernorm to embedding')
parser.add_argument('--no-scale-embedding', action='store_true',
help='if True, dont scale embeddings')
parser.add_argument('--checkpoint-activations', action='store_true',
help='checkpoint activations at each layer, which saves GPU '
'memory usage at the cost of some additional compute')
parser.add_argument('--offload-activations', action='store_true',
help='checkpoint activations at each layer, then save to gpu. Sets --checkpoint-activations.')
# args for "Cross+Self-Attention for Transformer Models" (Peitz et al., 2019)
parser.add_argument('--no-cross-attention', default=False, action='store_true',
help='do not perform cross-attention')
parser.add_argument('--cross-self-attention', default=False, action='store_true',
help='perform cross+self-attention')
# args for "Reducing Transformer Depth on Demand with Structured Dropout" (Fan et al., 2019)
parser.add_argument('--encoder-layerdrop', type=float, metavar='D', default=0,
help='LayerDrop probability for encoder')
parser.add_argument('--decoder-layerdrop', type=float, metavar='D', default=0,
help='LayerDrop probability for decoder')
parser.add_argument('--encoder-layers-to-keep', default=None,
help='which layers to *keep* when pruning as a comma-separated list')
parser.add_argument('--decoder-layers-to-keep', default=None,
help='which layers to *keep* when pruning as a comma-separated list')
# args for Training with Quantization Noise for Extreme Model Compression ({Fan*, Stock*} et al., 2020)
parser.add_argument('--quant-noise-pq', type=float, metavar='D', default=0,
help='iterative PQ quantization noise at training time')
parser.add_argument('--quant-noise-pq-block-size', type=int, metavar='D', default=8,
help='block size of quantization noise at training time')
parser.add_argument('--quant-noise-scalar', type=float, metavar='D', default=0,
help='scalar quantization noise and scalar quantization at training time')
# args for Fully Sharded Data Parallel (FSDP) training
parser.add_argument(
'--min-params-to-wrap', type=int, metavar='D', default=DEFAULT_MIN_PARAMS_TO_WRAP,
help=(
'minimum number of params for a layer to be wrapped with FSDP() when '
'training with --ddp-backend=fully_sharded. Smaller values will '
'improve memory efficiency, but may make torch.distributed '
'communication less efficient due to smaller input sizes. This option '
'is set to 0 (i.e., always wrap) when --checkpoint-activations or '
'--offload-activations are passed.'
)
)
# args for mixture-of-expert layers
parser.add_argument('--moe-freq', type=int, metavar='D', default=0,
help='Frequency at which we insert MoE Transformer layers')
parser.add_argument('--encoder-moe-freq', type=int, metavar='D', default=0,
help='Frequency at which we insert MoE Transformer encoder layers')
parser.add_argument('--decoder-moe-freq', type=int, metavar='D', default=0,
help='Frequency at which we insert MoE Transformer decoder layers')
parser.add_argument('--moe-expert-count', type=int, metavar='D', default=0,
help='Number of experts in each MoE Layer')
parser.add_argument('--moe-gating-use-fp32', default=False, action='store_true',
help="Use FP32 computations in MoE top2 gating function")
parser.add_argument('--moe-second-expert-policy', type=str, default='sampling',
help="policy for second expert, options: all/sampling/random")
parser.add_argument(
'--moe-normalize-gate-prob-before-dropping', default=False, action='store_true',
help=(
"whether to normalize gate probs before or after dropping experts "
"for capacity and randomization"
)
)
parser.add_argument('--moe-expert-ffn-dim', type=int, default=0,
help="MoE Expert FFN dimension")
parser.add_argument('--moe-top1-expert', default=False, action='store_true',
help="Use top1 gate instead of top2")
parser.add_argument(
'--moe-eval-capacity-token-fraction', type=float, default=0.25,
help=(
"Fraction of tokens as capacity during validation"
"if set to negative, use same as training. range: (0.0, 1.0]."
)
)
parser.add_argument('--moe-normalize-expert-grad', type=str, default='world_size',
help="Divide expert gradients by (1) 'world_size' (2) 'sqrt_world_size'")
parser.add_argument('--use-moe-pad-mask', default=False, action='store_true',
help="Don't route padding tokens to any expert")
parser.add_argument('--use-xmoe', default=False, action='store_true',
help="Enable X-Moe")
parser.add_argument('--freeze-moe', default=False, action='store_true',
help="Freeze MoE Params")
parser.add_argument('--deepnorm', default=False, action='store_true',
help="Enable DeepNorm")
parser.add_argument('--subln', default=False, action='store_true',
help="Enable SubLN")
parser.add_argument('--pretrained-dense-mt-model-path', type=str, default='')
# args for pseudo-MoE layers
parser.add_argument('--alternate-ffn-embed-dim', type=int, default=0,
help="FFN embed dim of alternate pseudo-MoE blocks")
parser.add_argument('--rel-pos-buckets', type=int, default=0,
                            help='number of buckets for relative position bias')
parser.add_argument('--max-rel-pos', type=int, default=0,
                            help='maximum distance for relative position bias')
# fmt: on
@classmethod
def build_model(cls, args, task):
"""Build a new model instance."""
# make sure all arguments are present in older models
base_architecture(args)
if getattr(args, "max_source_positions", None) is None:
args.max_source_positions = DEFAULT_MAX_SOURCE_POSITIONS
if getattr(args, "max_target_positions", None) is None:
args.max_target_positions = DEFAULT_MAX_TARGET_POSITIONS
args.ddp_rank = distributed_utils.get_data_parallel_rank()
src_dict, tgt_dict = task.source_dictionary, task.target_dictionary
if args.share_all_embeddings:
if src_dict != tgt_dict:
raise ValueError("--share-all-embeddings requires a joined dictionary")
if args.encoder_embed_dim != args.decoder_embed_dim:
raise ValueError(
"--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim"
)
if args.decoder_embed_path and (
args.decoder_embed_path != args.encoder_embed_path
):
raise ValueError(
"--share-all-embeddings not compatible with --decoder-embed-path"
)
encoder_embed_tokens = cls.build_embedding(
args, src_dict, args.encoder_embed_dim, args.encoder_embed_path
)
decoder_embed_tokens = encoder_embed_tokens
args.share_decoder_input_output_embed = True
else:
encoder_embed_tokens = cls.build_embedding(
args, src_dict, args.encoder_embed_dim, args.encoder_embed_path
)
decoder_embed_tokens = cls.build_embedding(
args, tgt_dict, args.decoder_embed_dim, args.decoder_embed_path
)
if getattr(args, "offload_activations", False):
args.checkpoint_activations = True # offloading implies checkpointing
encoder_embed_positions = (
PositionalEmbedding(
args.max_source_positions,
args.encoder_embed_dim,
src_dict.pad(),
learned=args.encoder_learned_pos,
)
if not args.no_token_positional_embeddings
else None
)
decoder_embed_positions = (
PositionalEmbedding(
args.max_target_positions,
args.decoder_embed_dim,
tgt_dict.pad(),
learned=args.decoder_learned_pos,
)
if not args.no_token_positional_embeddings
else None
)
if args.share_decoder_input_output_embed:
output_projection = torch.nn.Linear(
decoder_embed_tokens.weight.shape[1],
decoder_embed_tokens.weight.shape[0],
bias=False,
)
output_projection.weight = decoder_embed_tokens.weight
else:
output_projection = torch.nn.Linear(
args.decoder_embed_dim, len(tgt_dict), bias=False
)
torch.nn.init.normal_(
output_projection.weight, mean=0, std=args.decoder_embed_dim**-0.5
)
encoder = cls.build_encoder(
args,
encoder_embed_tokens,
encoder_embed_positions,
src_dict,
)
decoder = cls.build_decoder(
args,
decoder_embed_tokens,
decoder_embed_positions,
output_projection,
tgt_dict,
)
if not args.share_all_embeddings:
min_params_to_wrap = getattr(
args, "min_params_to_wrap", DEFAULT_MIN_PARAMS_TO_WRAP
)
# fsdp_wrap is a no-op when --ddp-backend != fully_sharded
encoder = fsdp_wrap(encoder, min_num_params=min_params_to_wrap)
decoder = fsdp_wrap(decoder, min_num_params=min_params_to_wrap)
return cls(args, encoder, decoder)
@classmethod
def build_embedding(cls, args, dictionary, embed_dim, path=None):
num_embeddings = len(dictionary)
padding_idx = dictionary.pad()
emb = Embedding(num_embeddings, embed_dim, padding_idx)
# if provided, load from preloaded dictionaries
if path:
embed_dict = utils.parse_embedding(path)
utils.load_embedding(embed_dict, dictionary, emb)
return emb
@classmethod
def build_encoder(cls, args, embed_tokens, embed_positions, dictionary):
config = EncoderConfig()
config.override(args)
return MTEncoder(
config,
embed_tokens,
embed_positions,
is_encoder_decoder=True,
dictionary=dictionary,
)
@classmethod
def build_decoder(
cls, args, embed_tokens, embed_positions, output_projection, dictionary
):
config = DecoderConfig()
config.override(args)
return MTDecoder(
config,
embed_tokens,
embed_positions,
output_projection,
is_encoder_decoder=True,
dictionary=dictionary,
)
def forward(
self,
src_tokens,
src_lengths,
prev_output_tokens,
return_all_hiddens: bool = False,
features_only: bool = False,
**kwargs
):
encoder_out = self.encoder(src_tokens, return_all_hiddens=return_all_hiddens)
decoder_out = self.decoder(
prev_output_tokens,
encoder_out=encoder_out,
features_only=features_only,
return_all_hiddens=return_all_hiddens,
)
return decoder_out
def get_normalized_probs(
self,
net_output: Tuple[Tensor, Optional[Dict[str, List[Optional[Tensor]]]]],
log_probs: bool,
sample: Optional[Dict[str, Tensor]] = None,
):
"""Get normalized probabilities (or log probs) from a net's output."""
return self.get_normalized_probs_scriptable(net_output, log_probs, sample)
class MTEncoder(Encoder, FairseqEncoder):
def forward(self, src_tokens, **kwargs):
self_attn_padding_mask = src_tokens.eq(self.dictionary.pad())
return super().forward(
src_tokens=src_tokens, encoder_padding_mask=self_attn_padding_mask, **kwargs
)
def reorder_encoder_out(self, encoder_out, new_order):
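        """Reorder encoder output according to *new_order* (used by beam search)."""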
new_encoder_out = encoder_out["encoder_out"].index_select(0, new_order)
new_encoder_embedding = encoder_out["encoder_embedding"].index_select(
0, new_order
)
new_encoder_padding_mask = encoder_out["encoder_padding_mask"].index_select(
0, new_order
)
encoder_states = encoder_out["encoder_states"]
if len(encoder_states) > 0:
for idx, state in enumerate(encoder_states):
encoder_states[idx] = state.index_select(0, new_order)
return {
"encoder_out": new_encoder_out, # T x B x C
"encoder_padding_mask": new_encoder_padding_mask,
"encoder_embedding": new_encoder_embedding, # B x T x C
"encoder_states": encoder_states, # List[T x B x C]
}
def max_positions(self):
return self.embed_positions.max_positions
@register_model_architecture("mt", "mt_base")
def base_architecture(args):
args.encoder_embed_path = getattr(args, "encoder_embed_path", None)
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 512)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 2048)
args.encoder_layers = getattr(args, "encoder_layers", 6)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 8)
args.encoder_normalize_before = getattr(args, "encoder_normalize_before", False)
args.encoder_learned_pos = getattr(args, "encoder_learned_pos", False)
args.decoder_embed_path = getattr(args, "decoder_embed_path", None)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", args.encoder_embed_dim)
args.decoder_ffn_embed_dim = getattr(
args, "decoder_ffn_embed_dim", args.encoder_ffn_embed_dim
)
args.decoder_layers = getattr(args, "decoder_layers", 6)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 8)
args.decoder_normalize_before = getattr(args, "decoder_normalize_before", False)
args.decoder_learned_pos = getattr(args, "decoder_learned_pos", False)
args.attention_dropout = getattr(args, "attention_dropout", 0.0)
args.activation_dropout = getattr(args, "activation_dropout", 0.0)
args.activation_fn = getattr(args, "activation_fn", "relu")
args.dropout = getattr(args, "dropout", 0.1)
args.adaptive_softmax_cutoff = getattr(args, "adaptive_softmax_cutoff", None)
args.adaptive_softmax_dropout = getattr(args, "adaptive_softmax_dropout", 0)
args.share_decoder_input_output_embed = getattr(
args, "share_decoder_input_output_embed", False
)
args.share_all_embeddings = getattr(args, "share_all_embeddings", False)
args.no_token_positional_embeddings = getattr(
args, "no_token_positional_embeddings", False
)
args.adaptive_input = getattr(args, "adaptive_input", False)
args.no_cross_attention = getattr(args, "no_cross_attention", False)
args.cross_self_attention = getattr(args, "cross_self_attention", False)
args.decoder_output_dim = getattr(
args, "decoder_output_dim", args.decoder_embed_dim
)
args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
args.no_scale_embedding = getattr(args, "no_scale_embedding", False)
args.layernorm_embedding = getattr(args, "layernorm_embedding", False)
args.tie_adaptive_weights = getattr(args, "tie_adaptive_weights", False)
args.checkpoint_activations = getattr(args, "checkpoint_activations", False)
args.offload_activations = getattr(args, "offload_activations", False)
if args.offload_activations:
args.checkpoint_activations = True
args.encoder_layers_to_keep = getattr(args, "encoder_layers_to_keep", None)
args.decoder_layers_to_keep = getattr(args, "decoder_layers_to_keep", None)
args.encoder_layerdrop = getattr(args, "encoder_layerdrop", 0)
args.decoder_layerdrop = getattr(args, "decoder_layerdrop", 0)
args.quant_noise_pq = getattr(args, "quant_noise_pq", 0)
args.quant_noise_pq_block_size = getattr(args, "quant_noise_pq_block_size", 8)
args.quant_noise_scalar = getattr(args, "quant_noise_scalar", 0)
args.is_moe = getattr(args, "is_moe", False)
args.selected_expert_count = getattr(args, "selected_expert_count", 2) | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/models/machine_translation.py | machine_translation.py |
import logging
from dataclasses import dataclass, field
from typing import Optional
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import utils
from fairseq.dataclass import ChoiceEnum, FairseqDataclass
from fairseq.models import BaseFairseqModel, register_model, register_model_architecture
from fairseq.models.squad import SQuADHead
from fairseq.models.transformer import DEFAULT_MIN_PARAMS_TO_WRAP, Embedding
from fairseq.modules import PositionalEmbedding
from omegaconf import II
try:
from apex.normalization import FusedLayerNorm as LayerNorm
except ModuleNotFoundError:
from torch.nn import LayerNorm
from torchscale.architecture.config import EncoderConfig
from .machine_translation import MTEncoder as Encoder
DEFAULT_MAX_SOURCE_POSITIONS = 1024
logger = logging.getLogger(__name__)
@dataclass
class BertConfig(FairseqDataclass):
activation_fn: ChoiceEnum(utils.get_available_activation_fns()) = field(
default="relu", metadata={"help": "activation function to use"}
)
dropout: float = field(default=0.1, metadata={"help": "dropout probability"})
attention_dropout: float = field(
default=0.0, metadata={"help": "dropout probability for attention weights"}
)
activation_dropout: float = field(
default=0.0, metadata={"help": "dropout probability after activation in FFN."}
)
encoder_embed_dim: int = field(
default=512, metadata={"help": "encoder embedding dimension"}
)
encoder_output_dim: int = field(
default=512, metadata={"help": "encoder output dimension"}
)
encoder_input_dim: int = field(
default=512, metadata={"help": "encoder input dimension"}
)
encoder_ffn_embed_dim: int = field(
default=2048, metadata={"help": "encoder embedding dimension for FFN"}
)
encoder_layers: int = field(default=6, metadata={"help": "num encoder layers"})
encoder_attention_heads: int = field(
default=8, metadata={"help": "num encoder attention heads"}
)
encoder_normalize_before: bool = field(
default=False, metadata={"help": "apply layernorm before each encoder block"}
)
no_encoder_final_norm: bool = field(
default=False,
metadata={"help": "don't add an extra layernorm after the last encoder block"},
)
no_token_positional_embeddings: bool = field(
default=False,
metadata={
"help": "if set, disables positional embeddings (outside self attention)"
},
)
share_encoder_input_output_embed: bool = field(
default=False, metadata={"help": "share encoder input and output embeddings"}
)
encoder_learned_pos: bool = field(
default=False,
metadata={"help": "use learned positional embeddings in the encoder"},
)
layernorm_embedding: bool = field(
default=False, metadata={"help": "add layernorm to embedding"}
)
no_scale_embedding: bool = field(
default=False, metadata={"help": "if True, dont scale embeddings"}
)
checkpoint_activations: bool = field(
default=False, metadata={"help": "checkpoint activations at each layer"}
)
offload_activations: bool = field(
default=False,
metadata={"help": "move checkpointed activations to CPU after they are used."},
)
# config for "Reducing Transformer Depth on Demand with Structured Dropout" (Fan et al., 2019)
encoder_layerdrop: float = field(
default=0.0, metadata={"help": "LayerDrop probability for encoder"}
)
encoder_layers_to_keep: Optional[str] = field(
default=None,
metadata={
"help": "which layers to *keep* when pruning as a comma-separated list"
},
)
# config for Fully Sharded Data Parallel (FSDP) training
min_params_to_wrap: int = field(
default=DEFAULT_MIN_PARAMS_TO_WRAP,
metadata={
"help": (
"minimum number of params for a layer to be wrapped with FSDP() when "
"training with --ddp-backend=fully_sharded. Smaller values will "
"improve memory efficiency, but may make torch.distributed "
"communication less efficient due to smaller input sizes. This option "
"is set to 0 (i.e., always wrap) when --checkpoint-activations or "
"--offload-activations are passed."
)
},
)
max_source_positions: int = field(
default=1024, metadata={"help": "max source positions"}
)
pooler_activation_fn: ChoiceEnum(utils.get_available_activation_fns()) = field(
default="relu", metadata={"help": "activation function to use for pooler layer"}
)
pooler_dropout: float = field(
default=0.0,
metadata={"help": "dropout probability in the masked_lm pooler layers"},
)
# options from other parts of the config
# add_bos_token: bool = II("task.add_bos_token")
# tokens_per_sample: int = II("task.tokens_per_sample")
tpu: bool = II("common.tpu")
    rel_pos_buckets: int = field(default=0, metadata={"help": "number of buckets for relative position bias"})
    max_rel_pos: int = field(default=0, metadata={"help": "maximum distance for relative position bias"})
use_xmoe: Optional[bool] = field(
default=False,
)
moe_freq: int = field(
default=0,
metadata={"help": "Frequency at which we insert MoE Transformer layers"},
)
moe_expert_count: int = field(
default=0, metadata={"help": "Number of experts in each MoE Layer"}
)
moe_gating_use_fp32: bool = field(
default=False,
metadata={"help": "Use FP32 computations in MoE top2 gating function"},
)
moe_second_expert_policy: str = field(
default="sampling",
metadata={"help": "policy for second expert, options: all/sampling/random"},
)
moe_normalize_gate_prob_before_dropping: bool = field(
default=False,
metadata={
"help": "whether to normalize gate probs before or after dropping experts for capacity and randomization"
},
)
moe_expert_ffn_dim: Optional[int] = field(
default=None, metadata={"help": "MoE expert FFN dimension"}
)
moe_top1_expert: Optional[bool] = field(
default=False, metadata={"help": "Use top1 gate instead of top2"}
)
moe_eval_capacity_token_fraction: Optional[float] = field(
default=0.25,
metadata={
"help": (
"Default: 0.25, Fraction of tokens as capacity during validation, "
"if set to negative, use same as training. range: (0.0, 1.0]."
)
},
)
moe_normalize_expert_grad: Optional[str] = field(
default="world_size",
metadata={
"help": "Divide expert gradients by (1) 'world_size' (2) 'sqrt_world_size'"
},
)
record_a2a_perf_stats: Optional[bool] = field(
default=False,
metadata={"help": "records all to all perf stats during distributed training"},
)
dummy_a2a: Optional[bool] = field(
default=False,
metadata={
"help": "By passes all to all during distributed training by returning the input buffer as output"
},
)
moe_batch_prioritized_routing: Optional[bool] = field(
default=False,
metadata={
"help": "if true orders token by the gate prob before capacity dropping."
},
)
ddp_rank: int = II("distributed_training.distributed_rank")
deepnorm: Optional[bool] = field(
default=False,
)
subln: Optional[bool] = field(
default=False,
)
@register_model("mlm", dataclass=BertConfig)
class BertModel(BaseFairseqModel):
def __init__(self, args, encoder):
super().__init__()
self.args = args
self.encoder = encoder
self.padding_idx = self.encoder.embed_tokens.padding_idx
self.classification_heads = nn.ModuleDict()
@classmethod
def build_model(cls, args, task):
"""Build a new model instance."""
args.max_source_positions = getattr(
args, "max_source_positions", DEFAULT_MAX_SOURCE_POSITIONS
)
embed_tokens = cls.build_embedding(
args, task.dictionary, args.encoder_embed_dim
)
embed_positions = (
PositionalEmbedding(
args.max_source_positions,
args.encoder_embed_dim,
task.dictionary.pad(),
learned=args.encoder_learned_pos,
)
if not args.no_token_positional_embeddings
else None
)
lm_head = cls.build_lm_head(
args,
args.encoder_embed_dim,
len(task.dictionary),
args.activation_fn,
weight=embed_tokens.weight,
)
config = EncoderConfig()
config.override(args)
encoder = Encoder(
config,
embed_tokens=embed_tokens,
embed_positions=embed_positions,
output_projection=lm_head,
is_encoder_decoder=False,
dictionary=task.dictionary,
)
return cls(args, encoder)
@classmethod
def build_embedding(cls, args, dictionary, embed_dim, path=None):
embed_tokens = Embedding(len(dictionary), embed_dim, dictionary.pad())
return embed_tokens
@classmethod
def build_lm_head(cls, args, embed_dim, output_dim, activation_fn, weight):
return LMHead(embed_dim, output_dim, activation_fn, weight)
def output_layer(self, features, masked_tokens=None):
return self.encoder.output_projection(features, masked_tokens=masked_tokens)
def register_classification_head(
self, name, num_classes=None, inner_dim=None, **kwargs
):
"""Register a classification head."""
if name in self.classification_heads:
prev_num_classes = self.classification_heads[name].out_proj.out_features
prev_inner_dim = self.classification_heads[name].dense.out_features
if num_classes != prev_num_classes or inner_dim != prev_inner_dim:
logger.warning(
're-registering head "{}" with num_classes {} (prev: {}) '
"and inner_dim {} (prev: {})".format(
name, num_classes, prev_num_classes, inner_dim, prev_inner_dim
)
)
self.classification_heads[name] = ClassificationHead(
self.args.encoder_embed_dim,
inner_dim or self.args.encoder_embed_dim,
num_classes,
self.args.pooler_activation_fn,
self.args.pooler_dropout,
)
def register_question_answering_head(self, name, num_classes=None):
self.classification_heads[name] = SQuADHead(
self.args.encoder_embed_dim,
)
def upgrade_state_dict_named(self, state_dict, name):
prefix = name + "." if name != "" else ""
# upgrade children modules
super().upgrade_state_dict_named(state_dict, name)
# Handle new classification heads present in the state dict.
current_head_names = (
[]
if not hasattr(self, "classification_heads")
else self.classification_heads.keys()
)
keys_to_delete = []
for k in state_dict.keys():
if not k.startswith(prefix + "classification_heads."):
continue
head_name = k[len(prefix + "classification_heads.") :].split(".")[0] # noqa: E203
num_classes = state_dict[
prefix + "classification_heads." + head_name + ".out_proj.weight"
].size(0)
inner_dim = state_dict[
prefix + "classification_heads." + head_name + ".dense.weight"
].size(0)
if getattr(self.args, "load_checkpoint_heads", False):
if head_name not in current_head_names:
self.register_classification_head(head_name, num_classes, inner_dim)
else:
if head_name not in current_head_names:
logger.warning(
"deleting classification head ({}) from checkpoint "
"not present in current model: {}".format(head_name, k)
)
keys_to_delete.append(k)
elif (
num_classes
!= self.classification_heads[head_name].out_proj.out_features
or inner_dim
!= self.classification_heads[head_name].dense.out_features
):
logger.warning(
"deleting classification head ({}) from checkpoint "
"with different dimensions than current model: {}".format(
head_name, k
)
)
keys_to_delete.append(k)
for k in keys_to_delete:
del state_dict[k]
# Copy any newly-added classification heads into the state dict
# with their current weights.
if hasattr(self, "classification_heads"):
cur_state = self.classification_heads.state_dict()
for k, v in cur_state.items():
if prefix + "classification_heads." + k not in state_dict:
logger.info("Overwriting " + prefix + "classification_heads." + k)
state_dict[prefix + "classification_heads." + k] = v
def get_normalized_probs_scriptable(
self,
net_output,
log_probs,
sample = None,
):
logits = net_output[0]
if log_probs:
return utils.log_softmax(logits, dim=-1)
else:
return utils.softmax(logits, dim=-1)
def forward(
self,
src_tokens=None,
features_only=False,
return_all_hiddens=False,
classification_head_name=None,
masked_tokens=None,
**kwargs
):
encoder_out = self.encoder(
src_tokens, features_only=True, return_all_hiddens=return_all_hiddens
)
x, extra = encoder_out["encoder_out"], encoder_out
if classification_head_name is not None:
x = self.classification_heads[classification_head_name](x)
elif not features_only:
x = self.output_layer(x, masked_tokens=masked_tokens)
return x, extra
class ClassificationHead(nn.Module):
"""Head for sentence-level classification tasks."""
def __init__(
self,
input_dim,
inner_dim,
num_classes,
activation_fn,
pooler_dropout,
):
super().__init__()
self.dense = nn.Linear(input_dim, inner_dim)
self.activation_fn = utils.get_activation_fn(activation_fn)
self.dropout = nn.Dropout(p=pooler_dropout)
self.out_proj = nn.Linear(inner_dim, num_classes)
def forward(self, features, **kwargs):
x = features[:, 0, :] # take <s> token (equiv. to [CLS])
x = self.dropout(x)
x = self.dense(x)
x = self.activation_fn(x.float()).type_as(x)
x = self.dropout(x)
x = self.out_proj(x)
return x
class LMHead(nn.Module):
"""Head for masked language modeling."""
def __init__(self, embed_dim, output_dim, activation_fn, weight=None):
super().__init__()
self.dense = nn.Linear(embed_dim, embed_dim)
self.activation_fn = utils.get_activation_fn(activation_fn)
self.layer_norm = LayerNorm(embed_dim)
if weight is None:
weight = nn.Linear(embed_dim, output_dim, bias=False).weight
self.weight = weight
self.bias = nn.Parameter(torch.zeros(output_dim))
def forward(self, features, masked_tokens=None, **kwargs):
# Only project the masked tokens while training,
# saves both memory and computation
if masked_tokens is not None:
features = features[masked_tokens, :]
x = self.dense(features)
x = self.activation_fn(x.float()).type_as(x)
x = self.layer_norm(x)
# project back to size of vocabulary with bias
x = F.linear(x, self.weight) + self.bias
return x
@register_model_architecture("mlm", "mlm_base")
def base_unilm_architecture(args):
if hasattr(args, "encoder_final_norm"):
args.no_encoder_final_norm = not args.encoder_final_norm
args.dropout = getattr(args, "dropout", 0.1)
args.attention_dropout = getattr(args, "attention_dropout", 0.0)
args.activation_dropout = getattr(args, "activation_dropout", 0.0)
args.pooler_dropout = getattr(args, "pooler_dropout", 0.0)
args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 768)
args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 3072)
args.encoder_layers = getattr(args, "encoder_layers", 12)
args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 12)
args.encoder_learned_pos = getattr(args, "encoder_learned_pos", True)
args.activation_fn = getattr(args, "activation_fn", "gelu")
args.pooler_activation_fn = getattr(args, "pooler_activation_fn", "tanh")
args.encoder_layerdrop = getattr(args, "encoder_layerdrop", 0)
args.encoder_layers_to_keep = getattr(args, "encoder_layers_to_keep", None)
# args.add_bos_token = getattr(args, "add_bos_token", False)
args.no_token_positional_embeddings = getattr(
args, "no_token_positional_embeddings", False
)
args.share_encoder_input_output_embed = getattr(
args, "share_encoder_input_output_embed", True
)
args.encoder_output_dim = getattr(
args, "encoder_output_dim", args.encoder_embed_dim
)
args.encoder_input_dim = getattr(args, "encoder_input_dim", args.encoder_embed_dim)
# Model training is not stable without this
args.encoder_normalize_before = getattr(args, "encoder_normalize_before", False)
args.no_encoder_final_norm = getattr(args, "no_encoder_final_norm", False)
args.no_scale_embedding = getattr(args, "no_scale_embedding", True)
args.layernorm_embedding = getattr(args, "layernorm_embedding", True)
args.checkpoint_activations = getattr(args, "checkpoint_activations", False)
args.offload_activations = getattr(args, "offload_activations", False)
if args.offload_activations:
args.checkpoint_activations = True | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/models/bert.py | bert.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
from dataclasses import dataclass, field
from typing import Optional
import torch
from fairseq import distributed_utils, utils
from fairseq.dataclass import ChoiceEnum, FairseqDataclass
from fairseq.models import (
FairseqIncrementalDecoder,
FairseqLanguageModel,
register_model,
register_model_architecture,
)
from fairseq.models.transformer import DEFAULT_MIN_PARAMS_TO_WRAP, Embedding
from fairseq.modules import PositionalEmbedding
from omegaconf import II
from torchscale.architecture.config import DecoderConfig
from torchscale.architecture.decoder import Decoder
DEFAULT_MAX_TARGET_POSITIONS = 1024
logger = logging.getLogger(__name__)
@dataclass
class LanguageConfig(FairseqDataclass):
activation_fn: ChoiceEnum(utils.get_available_activation_fns()) = field(
default="relu", metadata={"help": "activation function to use"}
)
dropout: float = field(default=0.1, metadata={"help": "dropout probability"})
attention_dropout: float = field(
default=0.0, metadata={"help": "dropout probability for attention weights"}
)
activation_dropout: float = field(
default=0.0, metadata={"help": "dropout probability after activation in FFN."}
)
relu_dropout: float = field(
default=0.0, metadata={"help": "dropout probability after activation in FFN."}
)
decoder_embed_dim: int = field(
default=512, metadata={"help": "decoder embedding dimension"}
)
decoder_output_dim: int = field(
default=512, metadata={"help": "decoder output dimension"}
)
decoder_input_dim: int = field(
default=512, metadata={"help": "decoder input dimension"}
)
decoder_ffn_embed_dim: int = field(
default=2048, metadata={"help": "decoder embedding dimension for FFN"}
)
decoder_layers: int = field(default=6, metadata={"help": "num decoder layers"})
decoder_attention_heads: int = field(
default=8, metadata={"help": "num decoder attention heads"}
)
decoder_normalize_before: bool = field(
default=False, metadata={"help": "apply layernorm before each decoder block"}
)
no_token_positional_embeddings: bool = field(
default=False,
metadata={
"help": "if set, disables positional embeddings (outside self attention)"
},
)
share_decoder_input_output_embed: bool = field(
default=False, metadata={"help": "share decoder input and output embeddings"}
)
decoder_learned_pos: bool = field(
default=False,
metadata={"help": "use learned positional embeddings in the decoder"},
)
layernorm_embedding: bool = field(
default=False, metadata={"help": "add layernorm to embedding"}
)
no_scale_embedding: bool = field(
default=False, metadata={"help": "if True, dont scale embeddings"}
)
checkpoint_activations: bool = field(
default=False, metadata={"help": "checkpoint activations at each layer"}
)
offload_activations: bool = field(
default=False,
metadata={"help": "move checkpointed activations to CPU after they are used."},
)
# config for Fully Sharded Data Parallel (FSDP) training
min_params_to_wrap: int = field(
default=DEFAULT_MIN_PARAMS_TO_WRAP,
metadata={
"help": (
"minimum number of params for a layer to be wrapped with FSDP() when "
"training with --ddp-backend=fully_sharded. Smaller values will "
"improve memory efficiency, but may make torch.distributed "
"communication less efficient due to smaller input sizes. This option "
"is set to 0 (i.e., always wrap) when --checkpoint-activations or "
"--offload-activations are passed."
)
},
)
moe_freq: int = field(
default=0,
metadata={"help": "Frequency at which we insert MoE Transformer layers"},
)
moe_expert_count: int = field(
default=0, metadata={"help": "Number of experts in each MoE Layer"}
)
moe_gating_use_fp32: bool = field(
default=False,
metadata={"help": "Use FP32 computations in MoE top2 gating function"},
)
moe_second_expert_policy: str = field(
default="sampling",
metadata={"help": "policy for second expert, options: all/sampling/random"},
)
moe_normalize_gate_prob_before_dropping: bool = field(
default=False,
metadata={
"help": "whether to normalize gate probs before or after dropping experts for capacity and randomization"
},
)
moe_expert_ffn_dim: Optional[int] = field(
default=None, metadata={"help": "MoE expert FFN dimension"}
)
moe_top1_expert: Optional[bool] = field(
default=False, metadata={"help": "Use top1 gate instead of top2"}
)
moe_eval_capacity_token_fraction: Optional[float] = field(
default=0.25,
metadata={
"help": (
"Default: 0.25, Fraction of tokens as capacity during validation, "
"if set to negative, use same as training. range: (0.0, 1.0]."
)
},
)
moe_normalize_expert_grad: Optional[str] = field(
default="world_size",
metadata={
"help": "Divide expert gradients by (1) 'world_size' (2) 'sqrt_world_size'"
},
)
record_a2a_perf_stats: Optional[bool] = field(
default=False,
metadata={"help": "records all to all perf stats during distributed training"},
)
dummy_a2a: Optional[bool] = field(
default=False,
metadata={
"help": "By passes all to all during distributed training by returning the input buffer as output"
},
)
moe_batch_prioritized_routing: Optional[bool] = field(
default=False,
metadata={
"help": "if true orders token by the gate prob before capacity dropping."
},
)
use_xmoe: Optional[bool] = field(
default=False,
)
# options from other parts of the config
add_bos_token: bool = II("task.add_bos_token")
tokens_per_sample: int = II("task.tokens_per_sample")
max_target_positions: Optional[int] = II("task.max_target_positions")
tpu: bool = II("common.tpu")
memory_efficient_fp16: bool = II("common.memory_efficient_fp16")
fp16: bool = II("common.fp16")
fp16_no_flatten_grads: bool = II("common.fp16_no_flatten_grads")
ddp_backend: str = II("distributed_training.ddp_backend")
world_size: int = II("distributed_training.distributed_world_size")
distributed_rank: int = II("distributed_training.distributed_rank")
ddp_rank: int = II("distributed_training.distributed_rank")
deepnorm: Optional[bool] = field(
default=False,
)
subln: Optional[bool] = field(
default=False,
)
rel_pos_buckets: Optional[int] = field(
default=0,
)
max_rel_pos: Optional[int] = field(
default=0,
)
xpos_rel_pos: Optional[bool] = field(
default=False,
)
xpos_scale_base: Optional[int] = field(
default=512,
)
@register_model("lm", dataclass=LanguageConfig)
class LanguageModel(FairseqLanguageModel):
def __init__(self, args, decoder):
self.args = args
super().__init__(decoder)
@classmethod
def build_model(cls, args, task):
if getattr(args, "max_target_positions", None) is None:
args.max_target_positions = getattr(
args, "tokens_per_sample", DEFAULT_MAX_TARGET_POSITIONS
)
embed_tokens = cls.build_embedding(
args, task.source_dictionary, args.decoder_embed_dim
)
embed_positions = (
PositionalEmbedding(
args.max_target_positions,
args.decoder_embed_dim,
task.dictionary.pad(),
learned=args.decoder_learned_pos,
)
if not args.no_token_positional_embeddings
else None
)
if args.share_decoder_input_output_embed:
output_projection = torch.nn.Linear(
embed_tokens.weight.shape[1],
embed_tokens.weight.shape[0],
bias=False,
)
output_projection.weight = embed_tokens.weight
else:
output_projection = torch.nn.Linear(
args.decoder_embed_dim, len(task.dictionary), bias=False
)
torch.nn.init.normal_(
output_projection.weight, mean=0, std=args.decoder_embed_dim**-0.5
)
if getattr(args, "moe_freq", 0) > 0 and (
getattr(args, "fp16", False)
and not getattr(args, "memory_efficient_fp16", False)
and getattr(args, "ddp_backend", None) != "fully_sharded"
):
assert (
args.fp16_no_flatten_grads
), "If training moe models, set --fp16-no-flatten-grads to calculate correct gradnorm"
args.ddp_rank = distributed_utils.get_data_parallel_rank()
config = DecoderConfig()
config.override(args)
decoder = LMDecoder(
config,
embed_tokens,
embed_positions,
output_projection,
is_encoder_decoder=False,
dictionary=task.dictionary,
)
return cls(args, decoder)
@classmethod
def build_embedding(cls, args, dictionary, embed_dim, path=None):
return Embedding(len(dictionary), embed_dim, dictionary.pad())
class LMDecoder(Decoder, FairseqIncrementalDecoder):
def forward(self, src_tokens, **kwargs):
self_attn_padding_mask = src_tokens.eq(self.dictionary.pad())
return super().forward(src_tokens, self_attn_padding_mask, **kwargs)
def max_positions(self):
return self.embed_positions.max_positions
def reorder_incremental_state_scripting(
self,
incremental_state,
new_order,
):
for module in incremental_state:
for key in incremental_state[module]:
result = incremental_state[module][key].index_select(0, new_order)
incremental_state[module][key] = result
@register_model_architecture("lm", "lm_base")
def base_lm_architecture(args):
# backward compatibility for older model checkpoints
if hasattr(args, "no_tie_adaptive_proj"):
# previous models defined --no-tie-adaptive-proj, so use the existence of
# that option to determine if this is an "old" model checkpoint
args.no_decoder_final_norm = True # old models always set this to True
if args.no_tie_adaptive_proj is False:
args.tie_adaptive_proj = True
if hasattr(args, "decoder_final_norm"):
args.no_decoder_final_norm = not args.decoder_final_norm
args.dropout = getattr(args, "dropout", 0.1)
args.attention_dropout = getattr(args, "attention_dropout", 0.0)
args.decoder_embed_dim = getattr(args, "decoder_embed_dim", 512)
args.decoder_ffn_embed_dim = getattr(args, "decoder_ffn_embed_dim", 2048)
args.decoder_layers = getattr(args, "decoder_layers", 6)
args.decoder_attention_heads = getattr(args, "decoder_attention_heads", 8)
args.adaptive_softmax_cutoff = getattr(args, "adaptive_softmax_cutoff", None)
args.adaptive_softmax_dropout = getattr(args, "adaptive_softmax_dropout", 0)
args.adaptive_softmax_factor = getattr(args, "adaptive_softmax_factor", 4)
args.decoder_learned_pos = getattr(args, "decoder_learned_pos", False)
args.activation_fn = getattr(args, "activation_fn", "relu")
args.decoder_layerdrop = getattr(args, "decoder_layerdrop", 0)
args.decoder_layers_to_keep = getattr(args, "decoder_layers_to_keep", None)
args.base_layers = getattr(args, "base_layers", 0)
args.base_sublayers = getattr(args, "base_sublayers", 1)
args.base_shuffle = getattr(args, "base_shuffle", False)
args.add_bos_token = getattr(args, "add_bos_token", False)
args.no_token_positional_embeddings = getattr(
args, "no_token_positional_embeddings", False
)
args.share_decoder_input_output_embed = getattr(
args, "share_decoder_input_output_embed", False
)
args.character_embeddings = getattr(args, "character_embeddings", False)
args.decoder_output_dim = getattr(
args, "decoder_output_dim", args.decoder_embed_dim
)
args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
# Model training is not stable without this
args.decoder_normalize_before = True
args.no_decoder_final_norm = getattr(args, "no_decoder_final_norm", False)
args.adaptive_input = getattr(args, "adaptive_input", False)
args.adaptive_input_factor = getattr(args, "adaptive_input_factor", 4)
args.adaptive_input_cutoff = getattr(args, "adaptive_input_cutoff", None)
args.tie_adaptive_weights = getattr(args, "tie_adaptive_weights", False)
args.tie_adaptive_proj = getattr(args, "tie_adaptive_proj", False)
args.no_scale_embedding = getattr(args, "no_scale_embedding", False)
args.layernorm_embedding = getattr(args, "layernorm_embedding", False)
args.checkpoint_activations = getattr(args, "checkpoint_activations", False)
args.offload_activations = getattr(args, "offload_activations", False)
if args.offload_activations:
args.checkpoint_activations = True | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/models/language_modeling.py | language_modeling.py |
import math
import torch
import torch.nn.functional as F
from fairseq import metrics, utils
from fairseq.criterions import MoECriterion, register_criterion, MoECriterionConfig
@register_criterion("masked_lm_moe_cross_entropy", dataclass=MoECriterionConfig)
class MaskedLMMoECrossEntropyCriterion(MoECriterion):
def compute_inner_loss(self, model, sample, reduce=True):
masked_tokens = sample["target"].ne(self.padding_idx)
sample_size = masked_tokens.int().sum()
masked_tokens = torch.where(
masked_tokens.any(),
masked_tokens,
masked_tokens.new([True]),
)
net_output = model(**sample["net_input"], masked_tokens=masked_tokens)
lprobs = model.get_normalized_probs(net_output, log_probs=True)
lprobs = lprobs.view(-1, lprobs.size(-1))
target = model.get_targets(sample, net_output)
if masked_tokens is not None:
target = target[masked_tokens]
nll_loss = F.nll_loss(
lprobs,
target.view(-1),
ignore_index=self.padding_idx,
reduction="sum" if reduce else "none",
)
logging_output = {
"inner_loss": nll_loss.data,
"ntokens": sample["ntokens"],
"nsentences": sample["target"].size(0),
"sample_size": sample_size,
}
return net_output, nll_loss, sample_size, logging_output
@staticmethod
def reduce_metrics(logging_outputs) -> None:
"""Aggregate logging outputs from data parallel training."""
MaskedLMMoECrossEntropyCriterion.reduce_moe_metrics(logging_outputs)
loss_sum = sum(log.get("inner_loss", 0) for log in logging_outputs)
ntokens = sum(log.get("ntokens", 0) for log in logging_outputs)
sample_size = sum(log.get("sample_size", 0) for log in logging_outputs)
# we divide by log(2) to convert the loss from base e to base 2
metrics.log_scalar(
"inner_loss", loss_sum / sample_size / math.log(2), sample_size, round=3
)
if sample_size != ntokens:
metrics.log_scalar(
"nll_loss", loss_sum / ntokens / math.log(2), ntokens, round=3
)
metrics.log_derived(
"ppl", lambda meters: utils.get_perplexity(meters["nll_loss"].avg)
)
else:
metrics.log_derived(
"ppl", lambda meters: utils.get_perplexity(meters["inner_loss"].avg)
) | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/criterions/masked_lm_moe.py | masked_lm_moe.py |
import json
import logging
import os
from argparse import Namespace
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass, field
import sentencepiece as spm
from fairseq import utils
from fairseq.data import Dictionary
from fairseq.dataclass import ChoiceEnum, FairseqDataclass
from fairseq.tasks import FairseqTask, register_task
from omegaconf import II, MISSING
from .data.mlm_loader import MLMLoader
logger = logging.getLogger(__name__)
SAMPLE_BREAK_MODE_CHOICES = ChoiceEnum(["none", "complete", "complete_doc", "eos"])
SHORTEN_METHOD_CHOICES = ChoiceEnum(["none", "truncate", "random_crop"])
@dataclass
class PretrainingConfig(FairseqDataclass):
data: str = field(
default=MISSING,
metadata={
"help": "colon separated path to data directories list, \
will be iterated upon during epochs in round-robin manner"
},
)
sample_break_mode: SAMPLE_BREAK_MODE_CHOICES = field(
default="complete",
metadata={
"help": 'If omitted or "none", fills each sample with tokens-per-sample '
'tokens. If set to "complete", splits samples only at the end '
"of sentence, but may include multiple sentences per sample. "
'"complete_doc" is similar but respects doc boundaries. '
'If set to "eos", includes only one sentence per sample.'
},
)
tokens_per_sample: int = field(
default=1024,
metadata={"help": "max number of tokens per sample for LM dataset"},
)
mask_prob: float = field(
default=0.15,
metadata={"help": "probability of replacing a token with mask"},
)
leave_unmasked_prob: float = field(
default=0.1,
metadata={"help": "probability that a masked token is unmasked"},
)
random_token_prob: float = field(
default=0.1,
metadata={"help": "probability of replacing a token with a random token"},
)
freq_weighted_replacement: bool = field(
default=False,
metadata={"help": "sample random replacement words based on word frequencies"},
)
mask_whole_words: bool = field(
default=False,
metadata={"help": "mask whole words; you may also want to set --bpe"},
)
mask_multiple_length: int = field(
default=1,
metadata={"help": "repeat the mask indices multiple times"},
)
mask_stdev: float = field(
default=0.0,
metadata={"help": "stdev of the mask length"},
)
shorten_method: SHORTEN_METHOD_CHOICES = field(
default="none",
metadata={
"help": "if not none, shorten sequences that exceed --tokens-per-sample"
},
)
shorten_data_split_list: str = field(
default="",
metadata={
"help": "comma-separated list of dataset splits to apply shortening to, "
'e.g., "train,valid" (default: all dataset splits)'
},
)
seed: int = II("common.seed")
span_length: float = field(
default=3.0,
metadata={"help": "average span length for masking"},
)
remove_source_sentinel: bool = field(
default=False,
metadata={"help": "remove the source sentinel for the span corruption task"},
)
remove_target_sentinel: bool = field(
default=False,
metadata={"help": "remove the target sentinel for the span corruption task"},
)
batch_read_ahead: int = field(
default=100000,
metadata={"help": "batch read ahead size for infinibatch"},
)
required_batch_size_multiple: int = II("dataset.required_batch_size_multiple")
spm_model: str = field(
default="",
metadata={"help": "sentencepice model to tokenize the data"},
)
dict_file: str = field(
default="",
metadata={"help": ""},
)
pad_to_max_length: bool = field(
default=False,
)
@register_task("pretraining", dataclass=PretrainingConfig)
class PLMTask(FairseqTask):
def __init__(self, cfg, dictionary, tokenizer):
super().__init__(cfg)
self.cfg = cfg
self.dictionary = dictionary
self.tokenizer = tokenizer
self.seed = cfg.seed
self.mask_idx = dictionary.index("<mask>")
@classmethod
def setup_task(cls, cfg, **kwargs):
paths = utils.split_paths(cfg.data)
assert len(paths) > 0
if cfg.dict_file != "":
dictionary = Dictionary.load(cfg.dict_file)
else:
dictionary = Dictionary.load(os.path.join(paths[0], "dict.txt"))
# add mask token
dictionary.add_symbol("<mask>")
for i in range(100):
dictionary.add_symbol(f"<mask_{i}>")
dictionary.pad_to_multiple_(cfg.required_batch_size_multiple)
logger.info("dictionary: {} types".format(len(dictionary)))
# tokenizer = SentencepieceBPE(Namespace(sentencepiece_model=cfg.spm_model))
tokenizer = spm.SentencePieceProcessor()
tokenizer.Load(cfg.spm_model)
return cls(cfg, dictionary, tokenizer)
def load_dataset(self, split, epoch=1, combine=False, **kwargs):
self.datasets[split] = {
"data": json.load(open(f"{self.cfg.data}/json/{split}.json")),
"data_dir": self.cfg.data,
"shuffle": True if split == "train" else False,
}
self.datasets[split] = Namespace(**self.datasets[split])
def dataset(self, split):
if split not in self.datasets:
raise KeyError("Dataset not loaded: " + split)
return self.datasets[split]
def get_batch_iterator(
self,
dataset,
max_tokens=None,
max_sentences=None,
max_positions=None,
ignore_invalid_inputs=False,
required_batch_size_multiple=1,
seed=1,
num_shards=1,
shard_id=0,
num_workers=0,
epoch=1,
data_buffer_size=0,
disable_iterator_cache=False,
**kwargs,
):
return MLMLoader(
self.cfg,
dataset,
self.dictionary,
self.tokenizer,
max_tokens=max_tokens,
max_sentences=max_sentences,
max_positions=max_positions,
ignore_invalid_inputs=ignore_invalid_inputs,
required_batch_size_multiple=required_batch_size_multiple,
seed=seed,
num_shards=num_shards,
shard_id=shard_id,
)
@property
def source_dictionary(self):
return self.dictionary
@property
def target_dictionary(self):
return self.dictionary | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/tasks/pretraining.py | pretraining.py |
import copy
import itertools
import os
import numpy as np
from infinibatch import iterators
from .basic_loader import BaseBatchGen
from .utils import NativeCheckpointableIterator, WeightIterator
class MLMLoader(BaseBatchGen):
def __init__(
self,
args,
dataset,
dictionary,
tokenizer,
max_tokens=None,
max_sentences=None,
max_positions=None,
ignore_invalid_inputs=False,
required_batch_size_multiple=1,
seed=1,
num_shards=1,
shard_id=0,
):
super().__init__()
self.args = args
self.data = dataset.data
self.data_dir = dataset.data_dir
self.shuffle = dataset.shuffle
self.dictionary = dictionary
self.tokenizer = tokenizer
self.max_tokens = max_tokens
self.max_sentences = max_sentences
self.max_positions = max_positions
self.tokens_per_sample = args.tokens_per_sample
self.sample_break_mode = args.sample_break_mode
self.ignore_invalid_inputs = ignore_invalid_inputs
self.required_batch_size_multiple = required_batch_size_multiple
self.seed = str(seed)
self.num_shards = num_shards
self.shard_id = shard_id
self.batch_read_ahead = args.batch_read_ahead
self._build_iter()
def _build_iter(self):
tokenized_lines = self._multilingual_tokenize()
self.padded_batches = self._batchify(tokenized_lines)
prefetch_batches = iterators.PrefetchIterator(
self.padded_batches,
buffer_size=10000,
buffer_in_main_process=True,
log_empty_buffer_warning=True and self.shard_id == 0,
)
prefetch_batches = iterators.MapIterator(prefetch_batches, self._move_to_tensor)
self._iter = prefetch_batches
def _multilingual_tokenize(self):
multilingual_iters = []
weights = []
for data in self.data:
multilingual_iters.append(self._tokenize(data))
if "weight" in data:
weights.append(float(data["weight"]))
else:
weights.append(int(data["count"]))
if len(multilingual_iters) == 1:
return multilingual_iters[0]
        sampling_iterator = WeightIterator(weights, self.seed)  # WeightIterator requires an explicit seed
control_iterator = NativeCheckpointableIterator(sampling_iterator)
tokenized_lines = iterators.MultiplexIterator(
control_iterator, multilingual_iters
)
return tokenized_lines
def _tokenize(self, data):
"""
data:
{
'source': list[Path],
'source_lang': str,
'count': int,
'weight': float,
'name': str,
}
"""
dataset = list(
zip(
data["source"],
itertools.repeat(data["source_lang"]),
)
)
if self.shuffle:
chunk_files = iterators.InfinitePermutationSourceIterator(
dataset,
seed=self.seed,
shuffle=self.shuffle,
num_instances=self.num_shards,
instance_rank=self.shard_id,
)
else:
chunk_files = iterators.ChunkedSourceIterator(
dataset,
num_instances=self.num_shards,
instance_rank=self.shard_id,
)
tokenized_lines = iterators.SelectManyIterator(
chunk_files, lambda files: self._read_from_files(*files)
)
tokenized_lines = iterators.SamplingRandomMapIterator(
tokenized_lines, self._prepare, self.seed
)
return tokenized_lines
def _batchify(self, lines):
if self.max_sentences is not None:
if self.batch_read_ahead > 0:
lines = iterators.BlockwiseShuffleIterator(
lines, self.batch_read_ahead, self.seed
)
batches = iterators.FixedBatchIterator(lines, self.max_sentences)
else:
def dynamic_batch_size(sample):
lengths = [len(x) for x in sample]
batch_size = self.max_tokens // max(lengths)
batch_size = (
batch_size
// self.required_batch_size_multiple
* self.required_batch_size_multiple
)
return max(1, batch_size)
batches = iterators.BucketedReadaheadBatchIterator(
lines,
read_ahead=self.batch_read_ahead,
key=(lambda x: max(len(x[0]), len(x[1]))) if self.shuffle else None,
batch_size=dynamic_batch_size,
shuffle=self.shuffle,
seed=self.seed,
)
def collate(batch):
batch_size = len(batch)
mlm_source_max_length = max([len(x[0]) for x in batch])
mlm_target_max_length = max([len(x[1]) for x in batch])
s2s_source_max_length = max([len(x[2]) for x in batch])
s2s_target_max_length = max([len(x[3]) for x in batch])
if self.args.pad_to_max_length:
mlm_source_max_length = self.args.tokens_per_sample
mlm_target_max_length = self.args.tokens_per_sample
mlm_source_ids = np.full(
shape=(batch_size, mlm_source_max_length),
dtype=np.int32,
fill_value=self.dictionary.pad(),
)
mlm_target_ids = np.full(
shape=(batch_size, mlm_target_max_length),
dtype=np.int32,
fill_value=self.dictionary.pad(),
)
s2s_source_ids = np.full(
shape=(batch_size, s2s_source_max_length),
dtype=np.int32,
fill_value=self.dictionary.pad(),
)
s2s_target_ids = np.full(
shape=(batch_size, s2s_target_max_length - 1),
dtype=np.int32,
fill_value=self.dictionary.pad(),
)
s2s_prev_input_ids = np.full(
shape=(batch_size, s2s_target_max_length - 1),
dtype=np.int32,
fill_value=self.dictionary.pad(),
)
for i, (
mlm_input_ids,
mlm_label_ids,
s2s_input_ids,
s2s_label_ids,
) in enumerate(batch):
mlm_source_ids[i, : len(mlm_input_ids)] = mlm_input_ids
mlm_target_ids[i, : len(mlm_label_ids)] = mlm_label_ids
s2s_source_ids[i, : len(s2s_input_ids)] = s2s_input_ids
s2s_target_ids[i, : len(s2s_label_ids) - 1] = s2s_label_ids[1:]
s2s_prev_input_ids[i, : len(s2s_label_ids) - 1] = s2s_label_ids[:-1]
ret_batch = {
"net_input": {
"src_tokens": mlm_source_ids.astype(np.int64),
},
"target": mlm_target_ids.astype(np.int64),
"nsentences": batch_size,
"ntokens": sum([len(x[0]) for x in batch]),
}
return ret_batch
padded_batches = iterators.MapIterator(batches, collate)
return padded_batches
def _prepare(self, _random, doc):
nonmasked_tokens, masked_tokens = self._mask_lm(_random, doc)
nonnoise_spans, noise_spans = self._span_corruption(_random, doc)
return nonmasked_tokens, masked_tokens, nonnoise_spans, noise_spans
def _mask_lm(self, _random, doc):
def mask_tokens():
return "<mask>"
length = len(doc)
mask_tokens_num = int(length * self.args.mask_prob)
mask_tokens_num = min(max(mask_tokens_num, 1), length - 1)
possible_mask_positions = _random.sample(range(length), k=mask_tokens_num)
possible_mask_positions = sorted(possible_mask_positions)
nonmasked_tokens = copy.deepcopy(doc)
masked_tokens = [self.dictionary.pad() for _ in range(len(doc))]
for position in possible_mask_positions:
# masked_tokens.append(nonmasked_tokens[position])
masked_tokens[position] = nonmasked_tokens[position]
nonmasked_tokens[position] = self.dictionary.indices[mask_tokens()]
return nonmasked_tokens, masked_tokens
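    # Output convention (illustrative): `nonmasked_tokens` is the model input with the
    # sampled positions replaced by the <mask> id, while `masked_tokens` holds the
    # original ids at those positions and the pad id everywhere else (the targets).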
def _span_corruption(self, _random, doc):
def mask_tokens(i):
return f"<mask_{i}>"
length = len(doc)
noise_tokens_num = int(length * self.args.mask_prob)
noise_tokens_num = min(max(noise_tokens_num, 1), length - 1)
noise_spans_num = int(noise_tokens_num / self.args.span_length)
noise_spans_num = max(noise_spans_num, 1)
nonnoise_tokens_num = length - noise_tokens_num
if noise_spans_num == 1:
noise_split_positions = [0, noise_tokens_num]
else:
possible_split_positions = list(range(1, noise_tokens_num))
_random.shuffle(possible_split_positions)
noise_split_positions = sorted(
possible_split_positions[: noise_spans_num - 1]
)
noise_split_positions = [0] + noise_split_positions + [noise_tokens_num]
possible_insert_positions = list(range(nonnoise_tokens_num))
_random.shuffle(possible_insert_positions)
noise_insert_positions = sorted(possible_insert_positions[:noise_spans_num])
nonnoise_spans, noise_spans = [], []
last_end = 0
for i in range(noise_spans_num):
start_pos = noise_insert_positions[i] + noise_split_positions[i]
end_pos = noise_insert_positions[i] + noise_split_positions[i + 1]
mask_id = self.dictionary.indices[mask_tokens(i)]
if getattr(self.args, "remove_target_sentinel", False):
noise_spans.append(doc[start_pos:end_pos])
else:
noise_spans.append([mask_id] + doc[start_pos:end_pos])
if getattr(self.args, "remove_source_sentinel", False):
nonnoise_spans.extend(doc[last_end:start_pos])
else:
nonnoise_spans.extend(doc[last_end:start_pos] + [mask_id])
last_end = end_pos
nonnoise_spans.extend(doc[last_end:])
noise_spans = sum(noise_spans, [])
return nonnoise_spans, noise_spans
def _read_from_files(self, source_file, source_lang):
# data = []
file_path = os.path.join(self.data_dir, source_file)
if not os.path.exists(file_path):
print("| file {} not exists".format(file_path), flush=True)
return iter([]) # skip bad file
with open(file_path, "r", encoding="utf8") as f:
lines = f.read().strip().split("\n")
doc = [self.dictionary.bos()]
for line in lines:
if line == "":
if self.sample_break_mode == "complete_doc":
# data.append(doc)
yield doc
doc = [self.dictionary.bos()]
continue
tokenized_line = self.tokenizer.EncodeAsPieces(line)
tokenized_id = [
self.dictionary.index(token) for token in tokenized_line
] + [self.dictionary.eos_index]
if len(tokenized_id) > self.tokens_per_sample:
continue
if len(doc) + len(tokenized_id) > self.tokens_per_sample:
# data.append(doc)
yield doc
doc = [self.dictionary.bos()]
doc.extend(tokenized_id)
if len(doc) > 1 and len(doc) <= self.tokens_per_sample:
# data.append(doc)
yield doc
# return data | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/tasks/data/mlm_loader.py | mlm_loader.py |
import collections
from random import Random
from typing import Dict, Iterable, Optional
import numpy as np
from infinibatch import iterators
def apply_to_sample(f, sample):
if hasattr(sample, "__len__") and len(sample) == 0:
return {}
def _apply(x):
if isinstance(x, np.ndarray):
return f(x)
elif isinstance(x, collections.OrderedDict):
# OrderedDict has attributes that needs to be preserved
od = collections.OrderedDict(
(key, _apply(value)) for key, value in x.items()
)
od.__dict__ = x.__dict__
return od
elif isinstance(x, dict):
return {key: _apply(value) for key, value in x.items()}
elif isinstance(x, list):
return [_apply(x) for x in x]
elif isinstance(x, tuple):
return tuple(_apply(x) for x in x)
elif isinstance(x, set):
return {_apply(x) for x in x}
else:
return x
return _apply(sample)
class NativeCheckpointableIterator(iterators.CheckpointableIterator):
def __init__(self, iterable: Iterable):
self._input_iterable = iterable
self.setstate(None)
def getstate(self) -> Dict:
return {"num_items_yielded": self._num_items_yielded}
def setstate(self, checkpoint: Optional[Dict]):
self._iterator = iter(self._input_iterable)
self._num_items_yielded = (
iterators._advance_iterator(self._iterator, checkpoint["num_items_yielded"])
if checkpoint is not None
else 0
)
def __next__(self):
item = next(self._iterator)
self._num_items_yielded += 1
return item
def close(self):
pass
class WeightIterator(object):
def __init__(self, weights, seed):
self.weights = weights
self.seed = seed
self.control_index = list(range(len(weights)))
self.setstate(None)
def __iter__(self):
return self
def getstate(self):
return {"random_state": self._random_state}
def setstate(self, checkpoint):
self._random_state = checkpoint["random_state"] if checkpoint else None
self._random = (
None # this will trigger the lazy initialization in self.__next__
)
def __next__(self):
if self._random is None:
self._random = Random(self.seed)
if self._random_state is not None:
self._random.setstate(self._random_state)
idx = self._random.choices(self.control_index, self.weights)[0]
self._random_state = self._random.getstate()
return idx
def close(self):
pass | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/tasks/data/utils.py | utils.py |
import math
import warnings
import torch
import torch.distributed as dist
from fairseq.utils import multi_tensor_l2norm_available, multi_tensor_total_norm
@torch.no_grad()
def clip_grad_norm_(
params, max_norm, moe_expert_count, aggregate_norm_fn=None
) -> torch.Tensor:
def grad_exists(p):
return p is not None and getattr(p, "grad", None) is not None
if isinstance(params, torch.Tensor):
params = [params]
params = list(params)
params = list(filter(grad_exists, params))
grads, expert_grads, base_expert_grads, sharded_grads = [], [], [], []
    # torch.distributed has no get_global_world_size(); fall back to get_world_size()
    world_size = dist.get_world_size() if dist.is_initialized() else 1
    denom = math.sqrt(max(world_size, moe_expert_count))
for p in params:
if hasattr(p, "expert"):
expert_grads.append(p.grad.detach() / denom)
elif hasattr(p, "base_expert"):
base_expert_grads.append(p.grad.detach())
elif hasattr(p, "_is_sharded"):
sharded_grads.append(p.grad.detach())
else:
grads.append(p.grad.detach())
if len(grads) == 0:
if len(params) > 0:
total_norm = params[0].new_tensor(0.0)
else:
total_norm = torch.tensor(0.0)
elif len(grads) == 1:
total_norm = torch.norm(grads[0], p=2, dtype=torch.float32)
else:
if multi_tensor_l2norm_available:
total_norm = multi_tensor_total_norm(grads)
else:
if torch.cuda.is_available():
warnings.warn(
"amp_C fused kernels unavailable, disabling multi_tensor_l2norm; "
"you may get better performance by installing NVIDIA's apex library"
)
device = torch.cuda.current_device()
elif grads[0].device.type == "xla":
device = grads[0].device
else:
device = torch.device("cpu")
total_norm = torch.norm(
torch.stack(
[torch.norm(g, p=2, dtype=torch.float32).to(device) for g in grads]
)
)
# calculate split_norm and all_reduce with other workers
norms = [total_norm]
for split_grads in [expert_grads, sharded_grads]:
if len(split_grads) == 0:
continue
split_norm = torch.norm(
torch.stack([torch.norm(g, p=2, dtype=torch.float32) for g in split_grads])
)
if dist.is_initialized():
split_norm.pow_(2)
dist.all_reduce(split_norm)
split_norm.sqrt_()
norms.append(split_norm)
if len(norms) > 1:
total_norm = torch.norm(torch.stack(norms))
if aggregate_norm_fn is not None:
total_norm = aggregate_norm_fn(total_norm)
if max_norm > 0:
max_norm = float(max_norm)
clip_coef = (max_norm / (total_norm + 1e-6)).clamp_(max=1)
for g in grads + expert_grads + sharded_grads + base_expert_grads:
g.mul_(clip_coef)
return total_norm | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/fairseq/utils/sparse_clip.py | sparse_clip.py |
import time
import torch
from accelerate.utils import set_seed
from datasets import load_dataset
from torch.nn import CrossEntropyLoss
from torch.utils.data import DataLoader
from transformers import get_scheduler, default_data_collator, get_linear_schedule_with_warmup
from torch.optim import AdamW
from lion_pytorch import Lion
from kosmos import Kosmos, KosmosTokenizer
from accelerate import Accelerator
from rich.progress import Progress
from datasets import Image
from bitsandbytes.optim import AdamW8bit
def count_number_of_parameters(model, only_trainable: bool = True) -> int:
    if only_trainable:
        num_params: int = sum(p.numel()
                              for p in model.parameters() if p.requires_grad)
    else:
        num_params: int = sum(p.numel() for p in model.parameters())
    return num_params
def prep_sample(sample):
question = sample["question"]
answer = sample["answer"].split("|!+")[1]
explanation = sample["explanation"]
text = f"Question: {question} Answer: {answer} Explanation: {explanation}"
image = sample["image"]
return {
"image": image,
"target_text": text
}
def train(args):
accelerator = Accelerator(
mixed_precision="fp16"
)
#if passed along set the training seed now
if args.seed is not None:
set_seed(args.seed)
model = Kosmos()
model = model.to(accelerator.device)
optimizer = Lion(model.parameters(), lr=args.learning_rate,
weight_decay=args.weight_decay)
lr_scheduler = get_linear_schedule_with_warmup(
optimizer=optimizer,
num_warmup_steps=args.warmup_steps,
num_training_steps=args.max_steps,
)
tokenizer = KosmosTokenizer()
dataset = load_dataset("bjoernp/vqax", split="text")
#dataset = dataset.cast_column("url", Image)
dataset = dataset.map(prep_sample, num_proc=8)
remove_columns = ['id', 'img_id', 'question', 'answer', 'explanation', 'none', 'image', 'target_text']
dataset = dataset.map(tokenizer.tokenize, batched=True,
batch_size=128, remove_columns=remove_columns)
train_dataloader = DataLoader(
dataset, collate_fn=default_data_collator, batch_size=args.batch_size, pin_memory=True
)
model, train_dataloader, optimizer, lr_scheduler = accelerator.prepare(model, train_dataloader, optimizer, lr_scheduler)
model.train()
accelerator.register_for_checkpointing(lr_scheduler)
model.clip_model.requires_grad_(False)
model.clip_model.encoder.layers[-1].requires_grad_(True)
accelerator.print(f"number of parameters: {count_number_of_parameters(model):,}")
accelerator.print(f"number of trainable parameters: {count_number_of_parameters(model, only_trainable=True):,}")
#log model and optimizer paramneters to wandb
accelerator.init_trackers(project_name="kosmos")
train_loader = iter(train_dataloader)
    epoch_loss = 0
    total_loss = 0
start_time = time.time()
with Progress() as progress:
task = progress.add_task("[red]Training...", total=args.max_steps)
for step in range(0, args.max_steps):
batch_start = time.time()
batch = next(train_loader)
outputs = model(**batch, self_attn_padding_mask=batch["attention_mask"])
#shift so that tokens < n predict n
            outputs = torch.cat([outputs[:, :1], outputs[:, 67:]], dim=1).contiguous()
#shift_logits = outputs[..., :-1, :].contigous()
# shift_labels=batch["labels"][..., 1:].contigous()
#flatten the tokens
loss_fct = CrossEntropyLoss()
one_hot_labels = torch.nn.functional.one_hot(batch["labels"][:, 1:], num_classes=32002).float()
loss = loss_fct(outputs[:, :-1], one_hot_labels)
epoch_loss += loss.detach().float()
accelerator.backward(loss)
optimizer.step()
optimizer.zero_grad()
batch_end = time.time()
logs = {
"loss": loss.items(),
"perplexity": torch.exp(loss).item(),
"lr": lr_scheduler.get_last_lr()[0],
"examples": args.batch_size * (step + 1),
"examples_per_second": args.batch_size / (batch_end - batch_start),
}
if step % args.log_every == args.log_every - 1:
accelerator.log(logs, step=step)
progress.update(task, advance=1, description=f"Step Loss: {loss.item():.5f} "
f"| Mean Loss: {(total_loss + epoch_loss) / step:.5f} "
f"| Mean PPL: {torch.exp((total_loss + epoch_loss) / step):.2f} "
f"| Examples: {args.batch_size * (step + 1)} "
f"| Examples/s: {args.batch_size / (batch_end - batch_start):.2f} "
f"| Elapsed: {time.strftime('%H:%M:%S', time.gmtime(time.time() - start_time))}")
if step % args.save_every == args.save_every - 1:
train_epoch_loss = epoch_loss / args.save_every
total_loss += epoch_loss
epoch_loss = 0
accelerator.log({
"train_ppl": torch.exp(train_epoch_loss),
"train_epoch_loss": train_epoch_loss,
}, step=step)
progress.print(f"Saving checkpoint at step {step}...")
accelerator.save_state(
f"{args.checkpoint_dir}/checkpoint_at_step_{step}/")
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--checkpoint_dir", type=str, default="checkpoints")
parser.add_argument("--learning_rate", type=float, default=1e-5)
parser.add_argument("--weight_decay", type=float, default=0.01)
parser.add_argument("--warmup_steps", type=int, default=0)
parser.add_argument("--max_steps", type=int, default=100000)
parser.add_argument("--batch_size", type=int, default=4)
parser.add_argument("--log_every", type=int, default=1)
parser.add_argument("--save_every", type=int, default=100)
parser.add_argument("--seed", type=int, default=None)
args = parser.parse_args()
train(args) | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/kosmos/train_kosmos.py | train_kosmos.py |
import torch
from torchscale.architecture.config import DecoderConfig
from torchscale.architecture.decoder import Decoder
from torchscale.component.embedding import PositionalEmbedding
from transformers import T5Tokenizer, CLIPProcessor, CLIPModel
from flamingo_pytorch import PerceiverResampler
from PIL import Image
from torch.nn import Embedding, Module
import bitsandbytes as bnb
class KosmosTokenizer:
def __init__(self):
self.processor = CLIPProcessor.from_pretrained('laion/CLIP-ViT-L-14-laion2B-s32B-b82k')
        # t5 uses a sentencepiece tokenizer
        self.tokenizer = T5Tokenizer.from_pretrained(
"t5-large",
additional_special_tokens=["<image>", "</image>"],
extra_ids=0,
model_max_length=1984
)
self.im_idx, self.im_end_idx = self.tokenizer.convert_tokens_to_ids(["<image>", "</image>"])
def tokenize_texts(self, texts):
texts = self.tokenizer(texts, return_tensors="pt", padding=True, truncation=True).input_ids
#add image tokens to text as "<s> <image> </image> text </s>"
image_tokens = torch.tensor([[self.im_idx, self.im_end_idx]] * texts.shape[0])
return torch.cat([texts[:, 0:1], image_tokens, texts[:, 1:]], dim=1), texts
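    # Layout sketch (illustrative): if the tokenized text is [t0, t1, ..., </s>], the first
    # returned tensor is [t0, <image>, </image>, t1, ..., </s>], i.e. the two image sentinels
    # are spliced in right after the first text token.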
def tokenize_images(self, images):
return self.processor(images=images, return_tensors="pt").pixel_values
def tokenize(self, sample):
text_tokens, only_text_tokens = self.tokenize_texts(sample["target_text"])
attention_mask = text_tokens != self.tokenizer.pad_token_id
dummy_image_features = torch.ones((text_tokens.shape[0], 64))
attention_mask = torch.cat([dummy_image_features, attention_mask], dim=1)
return {
"text_tokens": text_tokens,
"images": self.tokenize_images(sample["image"]),
"labels": only_text_tokens,
"attention_mask": attention_mask,
}
class Kosmos(Module):
    def __init__(self):
        super().__init__()
        # instantiate clip vit
self.clip_model = CLIPModel.from_pretrained("laion/CLIP-ViT-L-14-laion2B-s32B-b82K").vision_model
self.embed = bnb.nn.Embedding(
32002,
2048,
padding_idx=1
)
self.embed_positions = PositionalEmbedding(
2048,
2048,
1
)
self.output_projection = torch.nn.Linear(
2048, 32002, bias=False
)
torch.nn.init.normal_(
            self.output_projection.weight, mean=0, std=2048**-0.5
)
#config
self.config = DecoderConfig(
decoder_layers = 24,
decoder_embed_dim=2048,
decoder_ffn_embed_dim=8192,
decoder_attention_heads=32,
dropout=0.1,
activation_fn="flashattention",
use_xmoe=True,
attention_dropout=0.1,
vocab_size=32002,
subln=True,
xpos_rel_pos=True,
max_rel_pos=2048
)
        self.decoder = Decoder(
            self.config,
            embed_tokens=self.embed,
            embed_positions=self.embed_positions,
            output_projection=self.output_projection
        )
self.perceive = PerceiverResampler(
dim=1024,
depth=2,
dim_head=64,
heads=8,
num_latents=64,
num_media_embeds=257
)
self.image_proj = torch.nn.Linear(1024, 2048, bias=False)
torch.nn.init.normal_(
self.image_proj.weight, mean=0, std=2048**-0.5
)
def forward(self, text_tokens, images, **kwargs):
images = self.clip_model(pixel_values=images)["last_hidden_state"]
        images = self.perceive(images).squeeze(1)
images = self.image_proj(images)
model_input = self.decoder.forward_embedding(text_tokens)[1]
model_input = torch.cat([model_input[:, 0:2], images, model_input[:, 2:]], dim=1)
model_input = self.decoder.forward_embedding(model_input, token_embedding=model_input)[0]
return self.decoder(model_input, passed_x=model_input)[0] | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/examples/kosmos/kosmos.py | kosmos.py |
import math
import torch
import torch.nn.functional as F
from torch import nn
try:
from apex.normalization import FusedLayerNorm as LayerNorm
except ModuleNotFoundError:
from torch.nn import LayerNorm
from .multiway_network import MultiwayWrapper
from .xpos_relative_position import XPOS
class MultiheadAttention(nn.Module):
def __init__(
self,
args,
embed_dim,
num_heads,
dropout=0.0,
self_attention=False,
encoder_decoder_attention=False,
subln=False,
):
super().__init__()
self.args = args
self.embed_dim = embed_dim
self.num_heads = num_heads
self.head_dim = embed_dim // num_heads
self.scaling = self.head_dim**-0.5
self.self_attention = self_attention
self.encoder_decoder_attention = encoder_decoder_attention
assert self.self_attention ^ self.encoder_decoder_attention
self.k_proj = MultiwayWrapper(args, nn.Linear(embed_dim, embed_dim, bias=True))
self.v_proj = MultiwayWrapper(args, nn.Linear(embed_dim, embed_dim, bias=True))
self.q_proj = MultiwayWrapper(args, nn.Linear(embed_dim, embed_dim, bias=True))
self.out_proj = MultiwayWrapper(
args, nn.Linear(embed_dim, embed_dim, bias=True)
)
self.inner_attn_ln = (
MultiwayWrapper(args, LayerNorm(self.embed_dim, eps=args.layernorm_eps))
if subln and self.self_attention
else None
)
self.dropout_module = torch.nn.Dropout(dropout)
self.xpos = (
XPOS(self.head_dim, args.xpos_scale_base)
if args.xpos_rel_pos and self.self_attention
else None
)
def reset_parameters(self):
nn.init.xavier_uniform_(self.k_proj.weight, gain=1 / math.sqrt(2))
nn.init.xavier_uniform_(self.v_proj.weight, gain=1 / math.sqrt(2))
nn.init.xavier_uniform_(self.q_proj.weight, gain=1 / math.sqrt(2))
nn.init.xavier_uniform_(self.out_proj.weight)
nn.init.constant_(self.out_proj.bias, 0.0)
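    # Shape sketch (illustrative, derived from the code below): query/key/value are
    # (bsz, seq_len, embed_dim); the returned `attn` is (bsz, tgt_len, embed_dim) and
    # `attn_weights` is (num_heads, bsz, tgt_len, src_len).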
def forward(
self,
query,
key,
value,
incremental_state=None,
key_padding_mask=None,
attn_mask=None,
rel_pos=None,
):
bsz, tgt_len, embed_dim = query.size()
src_len = tgt_len
assert embed_dim == self.embed_dim, f"query dim {embed_dim} != {self.embed_dim}"
key_bsz, src_len, _ = key.size()
assert key_bsz == bsz, f"{query.size(), key.size()}"
assert value is not None
        assert (bsz, src_len) == value.shape[:2]
q = self.q_proj(query)
k = self.k_proj(key)
v = self.v_proj(value)
q *= self.scaling
q = q.view(bsz, tgt_len, self.num_heads, self.head_dim).transpose(1, 2)
k = k.view(bsz, src_len, self.num_heads, self.head_dim).transpose(1, 2)
v = v.view(bsz, src_len, self.num_heads, self.head_dim).transpose(1, 2)
q = q.reshape(bsz * self.num_heads, tgt_len, self.head_dim)
k = k.reshape(bsz * self.num_heads, src_len, self.head_dim)
v = v.reshape(bsz * self.num_heads, src_len, self.head_dim)
if incremental_state is not None:
if "prev_key" in incremental_state:
prev_key = incremental_state["prev_key"].view(
bsz * self.num_heads, -1, self.head_dim
)
prev_value = incremental_state["prev_value"].view(
bsz * self.num_heads, -1, self.head_dim
)
k = torch.cat([prev_key, k], dim=1)
v = torch.cat([prev_value, v], dim=1)
incremental_state["prev_key"] = k.view(
bsz, self.num_heads, -1, self.head_dim
)
incremental_state["prev_value"] = v.view(
bsz, self.num_heads, -1, self.head_dim
)
src_len = k.size(1)
if self.xpos is not None:
if incremental_state is not None:
offset = src_len - 1
else:
offset = 0
k = self.xpos(k, offset=0, downscale=True)
q = self.xpos(q, offset=offset, downscale=False)
attn_weights = torch.bmm(q, k.transpose(1, 2))
if attn_mask is not None:
attn_weights = torch.nan_to_num(attn_weights)
attn_mask = attn_mask.unsqueeze(0)
attn_weights += attn_mask
if key_padding_mask is not None:
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights.masked_fill(
key_padding_mask.unsqueeze(1).unsqueeze(2).to(torch.bool),
float("-inf"),
)
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
if rel_pos is not None:
rel_pos = rel_pos.view(attn_weights.size())
attn_weights = attn_weights + rel_pos
attn_weights = F.softmax(attn_weights, dim=-1, dtype=torch.float32).type_as(
attn_weights
)
attn_probs = self.dropout_module(attn_weights)
attn = torch.bmm(attn_probs, v)
attn = attn.transpose(0, 1).reshape(tgt_len, bsz, embed_dim).transpose(0, 1)
if self.inner_attn_ln is not None:
attn = self.inner_attn_ln(attn)
attn = self.out_proj(attn)
attn_weights = attn_weights.view(
bsz, self.num_heads, tgt_len, src_len
).transpose(1, 0)
return attn, attn_weights | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/component/multihead_attention.py | multihead_attention.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
class VisionLanguageEmbedding(nn.Module):
def __init__(self, text_embed, vision_embed):
super().__init__()
self.text_embed = text_embed
self.vision_embed = vision_embed
def forward(self, textual_tokens, visual_tokens, **kwargs):
if textual_tokens is None:
return self.vision_embed(visual_tokens)
if visual_tokens is None:
return self.text_embed(textual_tokens)
x1 = self.vision_embed(visual_tokens)
x2 = self.text_embed(textual_tokens)
return torch.cat([x1, x2], dim=1)
class VisionEmbedding(nn.Module):
"""Image to Patch Embedding"""
def __init__(
self,
img_size=224,
patch_size=16,
in_chans=3,
embed_dim=768,
contain_mask_token=False,
prepend_cls_token=False,
):
super().__init__()
img_size = (img_size, img_size)
patch_size = (patch_size, patch_size)
num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0])
self.patch_shape = (img_size[0] // patch_size[0], img_size[1] // patch_size[1])
self.img_size = img_size
self.patch_size = patch_size
self.num_patches = num_patches
self.proj = nn.Conv2d(
in_chans, embed_dim, kernel_size=patch_size, stride=patch_size
)
if contain_mask_token:
self.mask_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
else:
self.mask_token = None
if prepend_cls_token:
self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
else:
self.cls_token = None
def num_position_embeddings(self):
if self.cls_token is None:
return self.num_patches
else:
return self.num_patches + 1
def forward(self, x, masked_position=None, **kwargs):
B, C, H, W = x.shape
assert (
H == self.img_size[0] and W == self.img_size[1]
), f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})."
x = self.proj(x).flatten(2).transpose(1, 2)
batch_size, seq_len, _ = x.size()
if masked_position is not None:
assert self.mask_token is not None
mask_token = self.mask_token.expand(batch_size, seq_len, -1)
w = masked_position.unsqueeze(-1).type_as(mask_token)
x = x * (1 - w) + mask_token * w
if self.cls_token is not None:
cls_tokens = self.cls_token.expand(
batch_size, -1, -1
) # stole cls_tokens impl from Phil Wang, thanks
x = torch.cat((cls_tokens, x), dim=1)
return x
class TextEmbedding(nn.Embedding):
def reset_parameters(self):
nn.init.normal_(self.weight, mean=0, std=self.embedding_dim**-0.5)
self._fill_padding_idx_with_zero()
class PositionalEmbedding(nn.Embedding):
def forward(
self,
x,
positions=None,
**kwargs,
):
if positions is None:
# being consistent with Fairseq, which starts from 2.
positions = (
torch.arange(2, x.size(1) + 2, device=x.device).long().unsqueeze(0)
)
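            # e.g. for a sequence of length 4 this yields positions [[2, 3, 4, 5]]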
return F.embedding(
positions,
self.weight,
self.padding_idx,
self.max_norm,
self.norm_type,
self.scale_grad_by_freq,
self.sparse,
) | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/component/embedding.py | embedding.py |
import numpy as np
import torch
import torch.nn as nn
def fixed_pos_embedding(x):
seq_len, dim = x.shape
inv_freq = 1.0 / (10000 ** (torch.arange(0, dim) / dim))
sinusoid_inp = (
torch.einsum("i , j -> i j", torch.arange(0, seq_len, dtype=torch.float), inv_freq).to(x)
)
return torch.sin(sinusoid_inp), torch.cos(sinusoid_inp)
def rotate_every_two(x):
x1 = x[:, :, ::2]
x2 = x[:, :, 1::2]
x = torch.stack((-x2, x1), dim=-1)
    return x.flatten(-2)  # in einsum notation: rearrange(x, '... d j -> ... (d j)')
def duplicate_interleave(m):
"""
A simple version of `torch.repeat_interleave` for duplicating a matrix while interleaving the copy.
"""
dim0 = m.shape[0]
m = m.view(-1, 1) # flatten the matrix
m = m.repeat(1, 2) # repeat all elements into the 2nd dimension
m = m.view(dim0, -1) # reshape into a matrix, interleaving the copy
return m
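# Example (illustrative): a row [a, b] becomes [a, a, b, b], i.e. every element is repeated
# twice in place, so each (sin, cos) value lines up with the pair of adjacent feature
# dimensions rotated by rotate_every_two below.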
def apply_rotary_pos_emb(x, sin, cos, scale=1):
sin, cos = map(lambda t: duplicate_interleave(t * scale), (sin, cos))
# einsum notation for lambda t: repeat(t[offset:x.shape[1]+offset,:], "n d -> () n () (d j)", j=2)
return (x * cos) + (rotate_every_two(x) * sin)
class XPOS(nn.Module):
def __init__(
self, head_dim, scale_base=512
):
super().__init__()
self.head_dim = head_dim
self.scale_base = scale_base
self.register_buffer(
"scale", (torch.arange(0, head_dim, 2) + 0.4 * head_dim) / (1.4 * head_dim)
)
def forward(self, x, offset=0, downscale=False):
length = x.shape[1]
min_pos = -(length + offset) // 2
max_pos = length + offset + min_pos
scale = self.scale ** torch.arange(min_pos, max_pos, 1).to(self.scale).div(self.scale_base)[:, None]
sin, cos = fixed_pos_embedding(scale)
if scale.shape[0] > length:
scale = scale[-length:]
sin = sin[-length:]
cos = cos[-length:]
if downscale:
scale = 1 / scale
x = apply_rotary_pos_emb(x, sin, cos, scale)
return x | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/component/xpos_relative_position.py | xpos_relative_position.py |
import math
import torch
import torch.nn as nn
class RelativePositionBias(nn.Module):
def __init__(
self, bidirectional=True, num_buckets=32, max_distance=128, n_heads=12
):
super().__init__()
self.bidirectional = bidirectional
self.num_buckets = num_buckets
self.max_distance = max_distance
self.n_heads = n_heads
self.relative_attention_bias = nn.Embedding(self.num_buckets, self.n_heads)
@staticmethod
def _relative_position_bucket(
relative_position, bidirectional=True, num_buckets=32, max_distance=128
):
ret = 0
n = -relative_position
if bidirectional:
num_buckets //= 2
ret += (n < 0).to(torch.long) * num_buckets
n = torch.abs(n)
else:
n = torch.max(n, torch.zeros_like(n))
max_exact = num_buckets // 2
is_small = n < max_exact
val_if_large = max_exact + (
torch.log(n.float() / max_exact)
/ math.log(max_distance / max_exact)
* (num_buckets - max_exact)
).to(torch.long)
val_if_large = torch.min(
val_if_large, torch.full_like(val_if_large, num_buckets - 1)
)
ret += torch.where(is_small, n, val_if_large)
return ret
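    # Worked example (illustrative, with the defaults num_buckets=32, max_distance=128,
    # bidirectional=True): each half of the 32 buckets covers one sign of the offset;
    # within a half, distances 0-7 get exact buckets and larger distances share
    # logarithmically spaced buckets up to max_distance.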
def compute_bias(self, qlen, klen, step=None):
step = 0 if step is None else step
context_position = torch.arange(
step,
step + qlen,
dtype=torch.long,
device=self.relative_attention_bias.weight.device,
)[:, None]
memory_position = torch.arange(
klen, dtype=torch.long, device=self.relative_attention_bias.weight.device
)[None, :]
relative_position = memory_position - context_position # shape (qlen, klen)
rp_bucket = self._relative_position_bucket(
relative_position, # shape (qlen, klen)
bidirectional=self.bidirectional,
num_buckets=self.num_buckets,
max_distance=self.max_distance,
)
rp_bucket = rp_bucket.to(self.relative_attention_bias.weight.device)
values = self.relative_attention_bias(
rp_bucket
) # shape (qlen, klen, num_heads)
values = values.permute([2, 0, 1]).unsqueeze(
0
) # shape (1, num_heads, qlen, klen)
return values
def forward(self, batch_size, qlen, klen, step=None):
# shape (batch * num_heads, qlen, klen)
return (
self.compute_bias(qlen, klen, step)
.repeat(batch_size, 1, 1, 1)
.view(-1, qlen, klen)
) | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/component/relative_position_bias.py | relative_position_bias.py |
import torch
import torch.nn as nn
import torch.nn.functional as F
from flash_attn.flash_attention import FlashMHA
try:
from apex.normalization import FusedLayerNorm as LayerNorm
except ModuleNotFoundError:
from torch.nn import LayerNorm
class set_torch_seed(object):
def __init__(self, seed):
assert isinstance(seed, int)
self.rng_state = self.get_rng_state()
torch.manual_seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed(seed)
def get_rng_state(self):
state = {"torch_rng_state": torch.get_rng_state()}
if torch.cuda.is_available():
state["cuda_rng_state"] = torch.cuda.get_rng_state()
return state
def set_rng_state(self, state):
torch.set_rng_state(state["torch_rng_state"])
if torch.cuda.is_available():
torch.cuda.set_rng_state(state["cuda_rng_state"])
def __enter__(self):
return self
def __exit__(self, *exc):
self.set_rng_state(self.rng_state)
def make_experts(args, embed_dim, expert_ffn_dim):
world_size = (
1
if not torch.distributed.is_initialized()
else torch.distributed.get_world_size()
)
expert_list = []
ddp_rank = args.ddp_rank
start_seed = torch.randint(1000000, (1,)).item()
# at least as many experts than gpus
if args.moe_expert_count >= world_size:
assert (
args.moe_expert_count % world_size == 0
), f"{args.moe_expert_count}, {world_size}"
local_moe_expert_count = args.moe_expert_count // world_size
for i in range(local_moe_expert_count):
with set_torch_seed(start_seed + ddp_rank * local_moe_expert_count + i):
expert_list.append(
FeedForwardNetwork(
embed_dim,
expert_ffn_dim,
args.activation_fn,
args.dropout,
args.activation_dropout,
args.layernorm_eps,
args.subln,
)
)
else:
assert (
world_size % args.moe_expert_count == 0
), f"{world_size}, {args.moe_expert_count}"
with set_torch_seed(start_seed + ddp_rank % args.moe_expert_count):
expert_list.append(
FeedForwardNetwork(
embed_dim,
expert_ffn_dim,
args.activation_fn,
args.dropout,
args.activation_dropout,
args.layernorm_eps,
args.subln,
)
)
experts = nn.ModuleList(expert_list)
return experts
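# Worked example (illustrative): with moe_expert_count=8 and world_size=4, each rank builds
# 8 // 4 = 2 experts, each seeded differently so experts start from distinct initial weights;
# with fewer experts than ranks, ranks sharing an expert reuse the same seed and therefore
# the same initialization.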
def get_activation_fn(activation):
if activation == "relu":
return F.relu
elif activation == "gelu":
return F.gelu
elif activation == "flashattention":
return FlashMHA
else:
raise NotImplementedError
class FeedForwardNetwork(nn.Module):
def __init__(
self,
embed_dim,
ffn_dim,
activation_fn,
dropout,
activation_dropout,
layernorm_eps,
subln=False,
):
super().__init__()
self.embed_dim = embed_dim
self.activation_fn = get_activation_fn(activation=str(activation_fn))
self.activation_dropout_module = torch.nn.Dropout(activation_dropout)
self.dropout_module = torch.nn.Dropout(dropout)
self.fc1 = nn.Linear(self.embed_dim, ffn_dim)
self.fc2 = nn.Linear(ffn_dim, self.embed_dim)
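        # With subln=True, an extra LayerNorm is applied to the hidden
        # activations between the activation/dropout and fc2 (see forward()).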
self.ffn_layernorm = LayerNorm(ffn_dim, eps=layernorm_eps) if subln else None
def reset_parameters(self):
self.fc1.reset_parameters()
self.fc2.reset_parameters()
if self.ffn_layernorm is not None:
self.ffn_layernorm.reset_parameters()
def forward(self, x):
x_shape = x.shape
x = x.reshape(-1, x.size(-1))
x = self.fc1(x)
x = self.activation_fn(x.float()).type_as(x)
x = self.activation_dropout_module(x)
if self.ffn_layernorm is not None:
x = self.ffn_layernorm(x)
x = self.fc2(x)
x = x.view(x_shape)
x = self.dropout_module(x)
return x | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/component/feedforward_network.py | feedforward_network.py |
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
# Implementation of Top2Gating described in https://arxiv.org/pdf/2006.16668.pdf
# Code is inspired by Top2GatingOnLogits from lingvo:
# https://github.com/tensorflow/lingvo/blob/21b8106c5f1d30a196c98eedc441d4fd70833b11/lingvo/core/moe_layers.py#L477
# NOTE: This is a mirror of the code in
# https://github.com/facebookresearch/fairscale/tree/master/fairscale/nn/moe
import math
from typing import Callable, Dict, Optional, Tuple
import torch
import torch.nn.functional as F
from torch import Tensor
from .moe_layer import fused_cumsum_sub_one, has_tutel
# use a fixed temperature to compute balance loss
TEMPERATURE_FOR_L_UAX = 0.07
# maximum capacity of 1 expert as a fraction of number of tokens in the batch
# Note: setting this to 1.0 causes inference to significantly slow down
EVAL_CAPACITY_TOKEN_FRACTION = 0.25
# logging
SAMPLE_FRACTION = 0.2
def top1gating(
logits: torch.Tensor,
input_mask: Optional[torch.Tensor] = None,
use_fp32=False,
capacity_factor=1.0,
eval_mode=False,
moe_eval_capacity_token_fraction=EVAL_CAPACITY_TOKEN_FRACTION,
use_xmoe=False,
gate_obj=None,
) -> Tuple[Tensor, Tensor, Tensor, Dict]:
"""Implements Top2Gating on logits."""
metadata = {}
if use_fp32:
orig_dtype = logits.dtype
logits = logits.float()
gates = F.softmax(logits, dim=1)
metadata["entropy_gating"] = entropy(probs=gates).mean().detach()
# gates has shape of SE
num_tokens = gates.shape[0]
num_experts = gates.shape[1]
if moe_eval_capacity_token_fraction > 0.0 and eval_mode:
capacity = math.ceil(moe_eval_capacity_token_fraction * num_tokens)
else:
# capacity = capacity_factor * S/E
capacity = int(capacity_factor * math.ceil(num_tokens / num_experts))
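        # e.g. with 4096 tokens, 8 experts and capacity_factor=1.0, each expert
        # can accept at most int(1.0 * ceil(4096 / 8)) = 512 tokens.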
    # Create a mask for each token's top-1 expert
indices1_s = torch.argmax(gates, dim=1)
mask1 = one_hot(indices1_s, num_classes=num_experts, unsqueeze_indices=True)
if input_mask is not None and input_mask.any():
nonpadding = ~input_mask
mask1 = mask1 * nonpadding.unsqueeze(-1).to(mask1.dtype)
# for logging (percent of tokens routed to each expert)
expert1_hist = (
100
* torch.histc(
(indices1_s.squeeze() + 1), bins=num_experts, min=1, max=num_experts
)
/ num_tokens
)
metadata["unused_expert1_count"] = (expert1_hist == 0).sum()
expert1_hist = (
torch.sort(expert1_hist, dim=0, descending=True).values
+ torch.finfo(torch.float32).tiny
)
sample_count = max(math.ceil(num_experts * SAMPLE_FRACTION), 1)
metadata["expert1_balance_top"] = expert1_hist[:sample_count].sum()
metadata["expert1_balance_bottom"] = expert1_hist[-sample_count:].sum()
gates1_s = (gates * mask1).sum(dim=1)
# Compute locations in capacity buffer
locations1 = fused_cumsum_sub_one(mask1)
# Compute l_aux
me = torch.mean(gates, dim=0)
ce = torch.mean(mask1.to(gates.dtype), dim=0)
l_aux = torch.mean(me * ce)
l_aux = l_aux * num_experts * num_experts
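    # me is the mean gate probability per expert and ce the fraction of tokens
    # routed to each expert; l_aux = E^2 * mean_e(me * ce) is the GShard
    # load-balancing loss, minimized when tokens are spread evenly over experts.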
if has_tutel:
locations1_s = torch.sum(locations1 * mask1, dim=1)
return (
l_aux,
metadata,
capacity,
num_experts,
[
indices1_s,
],
[
locations1_s,
],
[
gates1_s,
],
)
# Remove locations outside capacity from mask
mask1 = mask1 * torch.lt(locations1, capacity)
# Store the capacity location for each token
locations1_s = torch.sum(locations1 * mask1, dim=1)
# Calculate combine_weights and dispatch_mask
gates1 = gates1_s.unsqueeze(-1) * mask1.to(gates1_s.dtype) # einsum("s,se->se")
# locations1_sc = num_tokens * capacity
locations1_sc = one_hot(locations1_s, num_classes=capacity, unsqueeze_indices=True)
combine1_sec = torch.bmm(
# einsum("se,sc->sec")
gates1.unsqueeze(-1),
locations1_sc.to(gates1.dtype).unsqueeze(1),
)
dispatch_mask = combine1_sec.bool()
if use_fp32:
return l_aux, combine1_sec.to(orig_dtype), dispatch_mask, metadata
else:
return l_aux, combine1_sec, dispatch_mask, metadata
class Top1Gate(torch.nn.Module):
"""Gate module which implements Top2Gating as described in Gshard_.
::
gate = Top2Gate(model_dim, num_experts)
l_aux, combine_weights, dispatch_mask = gate(input)
.. Gshard_: https://arxiv.org/pdf/2006.16668.pdf
Args:
model_dim (int):
size of model embedding dimension
        num_experts (int):
number of experts in model
"""
wg: torch.nn.Linear
def __init__(
self,
model_dim: int,
num_experts: int,
use_fp32=False,
input_noise_type=None,
capacity_factor=1.0,
moe_eval_capacity_token_fraction=EVAL_CAPACITY_TOKEN_FRACTION,
use_xmoe=False,
) -> None:
# TODO: merge this to top2gate.py
#
super().__init__()
if not use_xmoe:
self.wg = torch.nn.Linear(model_dim, num_experts, bias=False)
else:
self.wg_reduction = torch.nn.Linear(model_dim, 16, bias=False)
wg = torch.empty(num_experts, 16)
torch.nn.init.orthogonal_(wg, gain=0.32)
self.register_parameter("wg", torch.nn.Parameter(wg))
self.use_xmoe = use_xmoe
self.use_fp32 = use_fp32
self.input_noise_type = input_noise_type
self.capacity_factor = capacity_factor
self.moe_eval_capacity_token_fraction = moe_eval_capacity_token_fraction
def forward(self, input, mask=None): # type: ignore
if self.use_xmoe:
input = self.wg_reduction(input)
with torch.no_grad():
wg_norm = self.wg.norm(p=2.0, dim=1, keepdim=True)
self.wg.mul_(1.5 / wg_norm)
logits = self._cosine(input, self.wg)
logits = self._make_finite(logits)
else:
logits = self.wg(input)
return top1gating(
logits,
mask,
use_fp32=self.use_fp32,
capacity_factor=self.capacity_factor,
eval_mode=not self.training,
moe_eval_capacity_token_fraction=self.moe_eval_capacity_token_fraction,
use_xmoe=self.use_xmoe,
gate_obj=self,
)
def _make_finite(self, scores):
ok = scores.isfinite()
if not ok.all():
# NaNs here can break the assignment algorithm
scores[~ok] = scores[ok].min()
return scores
def _get_gating_temperature(self, eps=1e-4):
if self.gating_t.data.item() < eps:
return eps
return self.gating_t
def _cosine(self, mat1, mat2, eps=1e-4):
assert mat1.dim() == 2
assert mat2.dim() == 2
# mat1 = F.normalize(mat1, p=2.0, dim=1, eps=eps)
mat2 = F.normalize(mat2.float(), p=2.0, dim=1, eps=eps)
return mat1.float().matmul(mat2.transpose(0, 1)).type_as(mat1)
gumbel_map: Dict[torch.device, Callable] = {}
def gumbel_rsample(shape: Tuple, device: torch.device) -> Tensor:
gumbel = gumbel_map.get(device)
if gumbel is None:
one = torch.tensor(1.0, device=device)
zero = torch.tensor(0.0, device=device)
gumbel = torch.distributions.gumbel.Gumbel(zero, one).rsample # type: ignore
gumbel_map[device] = gumbel
return gumbel(shape)
def one_hot(indices: torch.Tensor, num_classes: int, unsqueeze_indices=False) -> Tensor:
if unsqueeze_indices:
indices = indices.unsqueeze(-1)
    assert indices.shape[-1] == 1, "last dimension of indices must have size 1"
output = torch.zeros(
indices.shape[:-1] + (num_classes,), device=indices.device, dtype=indices.dtype
)
output.scatter_(len(output.shape) - 1, indices, 1)
return output
def entropy(probs):
logits = torch.distributions.utils.probs_to_logits(probs)
p_log_p = probs * logits
return -p_log_p.sum(-1)
def top2gating(
logits: torch.Tensor,
input_mask: Optional[torch.Tensor] = None,
use_fp32=False,
second_expert_policy="sampling",
normalize_gate_prob_before_dropping=False,
eval_mode=False,
moe_eval_capacity_token_fraction=0.25,
batch_prioritized_routing=False,
) -> Tuple[Tensor, Tensor, Tensor, Dict]:
"""Implements Top2Gating on logits."""
metadata = {}
if use_fp32:
orig_dtype = logits.dtype
logits = logits.float()
gates = F.softmax(logits, dim=1)
metadata["entropy_gating"] = entropy(probs=gates).mean().detach()
# gates has shape of SE
num_tokens = gates.shape[0]
num_experts = gates.shape[1]
if moe_eval_capacity_token_fraction > 0.0 and eval_mode:
capacity = math.ceil(moe_eval_capacity_token_fraction * num_tokens)
else:
# capacity = 2S/E
capacity = 2 * math.ceil(num_tokens / num_experts)
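        # e.g. with 4096 tokens and 8 experts, each expert can accept at most
        # 2 * ceil(4096 / 8) = 1024 tokens, shared between first- and
        # second-choice assignments.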
    # Create a mask for each token's top-1 expert
indices1_s = torch.argmax(gates, dim=1, keepdim=True)
mask1 = one_hot(indices1_s, num_experts)
if second_expert_policy == "sampling":
        # Create a mask for each token's second expert using the Gumbel-max trick
# https://timvieira.github.io/blog/post/2014/07/31/gumbel-max-trick/
logits_w_noise = logits + gumbel_rsample(logits.shape, device=logits.device)
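        # Adding i.i.d. Gumbel noise and then taking an argmax is equivalent to
        # sampling an expert in proportion to softmax(logits) (Gumbel-max trick).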
else:
logits_w_noise = logits
    # Exclude each token's top expert by masking its logit to -inf
logits_except1 = logits_w_noise.masked_fill(mask1.bool(), float("-inf"))
indices2_s = torch.argmax(logits_except1, dim=1, keepdim=True)
mask2 = one_hot(indices2_s, num_experts)
gates1_s = (gates * mask1).sum(dim=1)
gates2_s = (gates * mask2).sum(dim=1)
if normalize_gate_prob_before_dropping:
# Normalize gate probabilities
denom_s = gates1_s + gates2_s
# Avoid divide-by-zero
denom_s = torch.clamp(denom_s, min=torch.finfo(denom_s.dtype).eps)
gates1_s = gates1_s / denom_s
gates2_s = gates2_s / denom_s
if second_expert_policy == "random":
sampled = (2 * gates2_s) > torch.rand_like(gates2_s)
mask2 = mask2 * sampled.repeat(num_experts, 1).transpose(1, 0)
# Compute locations in capacity buffer
if input_mask is not None and input_mask.any():
nonpadding = ~input_mask
mask1 = mask1 * nonpadding.unsqueeze(-1).to(mask1.dtype)
mask2 = mask2 * nonpadding.unsqueeze(-1).to(mask1.dtype)
if batch_prioritized_routing:
        # Batch-prioritized routing: rank tokens by their top-1 gate score so
        # that, when an expert's capacity overflows, the most confidently
        # routed tokens keep their slots.
importance_scores = -1 * gates.max(dim=1)[0]
sorted_mask1 = mask1[importance_scores.argsort(dim=0)]
sorted_cumsum1 = fused_cumsum_sub_one(sorted_mask1) * sorted_mask1
importance_sorted_locations1 = sorted_cumsum1[
importance_scores.argsort(dim=0).argsort(dim=0)
]
sorted_mask2 = mask2[importance_scores.argsort(dim=0)]
sorted_cumsum2 = fused_cumsum_sub_one(sorted_mask2) * sorted_mask2
importance_sorted_locations2 = sorted_cumsum2[
importance_scores.argsort(dim=0).argsort(dim=0)
]
importance_sorted_locations2 += torch.sum(mask1, dim=0, keepdim=True)
locations1, locations2 = (
importance_sorted_locations1,
importance_sorted_locations2,
)
else:
locations1 = fused_cumsum_sub_one(mask1)
locations2 = fused_cumsum_sub_one(mask2)
        # Offset second-choice locations by the number of tokens already
        # assigned to each expert as a first choice
locations2 += torch.sum(mask1, dim=0, keepdim=True)
# Compute l_aux
me = torch.mean(gates, dim=0)
ce = torch.mean(mask1.to(gates.dtype), dim=0)
l_aux = torch.mean(me * ce)
l_aux = l_aux * num_experts * num_experts
# for logging purposes
metadata["overflow_expert1"] = (
100 * torch.sum(mask1 * torch.ge(locations1, capacity)) / torch.sum(mask1)
)
metadata["overflow_expert2"] = (
100 * torch.sum(mask2 * torch.ge(locations2, capacity)) / torch.sum(mask2)
)
# Remove locations outside capacity from mask
mask1_, mask2_ = mask1, mask2
mask1 = mask1 * torch.lt(locations1, capacity)
mask2 = mask2 * torch.lt(locations2, capacity)
# for logging (percent of tokens routed to each expert)
expert1_hist = (
100
* torch.histc(
(indices1_s.squeeze() + 1), bins=num_experts, min=1, max=num_experts
)
/ num_tokens
)
metadata["unused_expert1_count"] = (expert1_hist == 0).sum()
expert1_hist = (
torch.sort(expert1_hist, dim=0, descending=True).values
+ torch.finfo(torch.float32).tiny
)
expert2_hist = (
100
* torch.histc(
(indices2_s.squeeze() + 1), bins=num_experts, min=1, max=num_experts
)
/ num_tokens
)
metadata["unused_expert2_count"] = (expert2_hist == 0).sum()
expert2_hist = (
torch.sort(expert2_hist, dim=0, descending=True).values
+ torch.finfo(torch.float32).tiny
)
sample_count = max(math.ceil(num_experts * SAMPLE_FRACTION), 1)
metadata["expert1_balance_top"] = expert1_hist[:sample_count].sum()
metadata["expert1_balance_bottom"] = expert1_hist[-sample_count:].sum()
metadata["expert2_balance_top"] = expert2_hist[:sample_count].sum()
metadata["expert2_balance_bottom"] = expert2_hist[-sample_count:].sum()
if not normalize_gate_prob_before_dropping:
# Normalize gate probabilities
gates1_s = (gates * mask1).sum(dim=1)
gates2_s = (gates * mask2).sum(dim=1)
denom_s = gates1_s + gates2_s
# Avoid divide-by-zero
denom_s = torch.clamp(denom_s, min=torch.finfo(denom_s.dtype).eps)
gates1_s /= denom_s
gates2_s /= denom_s
if has_tutel:
locations1_s = torch.sum(locations1 * mask1_, dim=1)
locations2_s = torch.sum(locations2 * mask2_, dim=1)
return (
l_aux,
metadata,
capacity,
num_experts,
[indices1_s, indices2_s],
[locations1_s, locations2_s],
[gates1_s, gates2_s],
)
# Store the capacity location for each token
locations1_s = torch.sum(locations1 * mask1, dim=1)
locations2_s = torch.sum(locations2 * mask2, dim=1)
# Calculate combine_weights and dispatch_mask
gates1 = gates1_s.unsqueeze(-1) * mask1.to(gates1_s.dtype) # einsum("s,se->se")
gates2 = gates2_s.unsqueeze(-1) * mask2.to(gates2_s.dtype) # einsum("s,se->se")
locations1_sc = one_hot(locations1_s, num_classes=capacity, unsqueeze_indices=True)
locations2_sc = one_hot(locations2_s, num_classes=capacity, unsqueeze_indices=True)
combine1_sec = torch.bmm(
# einsum("se,sc->sec")
gates1.unsqueeze(-1),
locations1_sc.to(gates1.dtype).unsqueeze(1),
)
combine2_sec = torch.bmm(
# einsum("se,sc->sec")
gates2.unsqueeze(-1),
locations2_sc.to(gates2.dtype).unsqueeze(1),
)
combine_weights = combine1_sec + combine2_sec
dispatch_mask = combine_weights.bool()
if use_fp32:
return l_aux, combine_weights.to(orig_dtype), dispatch_mask, metadata
else:
return l_aux, combine_weights, dispatch_mask, metadata
class Top2Gate(torch.nn.Module):
"""Gate module which implements Top2Gating as described in Gshard_.
::
gate = Top2Gate(model_dim, num_experts)
        l_aux, combine_weights, dispatch_mask, metadata = gate(input)
.. Gshard_: https://arxiv.org/pdf/2006.16668.pdf
Args:
model_dim (int):
size of model embedding dimension
        num_experts (int):
number of experts in model
"""
wg: torch.nn.Linear
def __init__(
self,
model_dim: int,
num_experts: int,
use_fp32=False,
second_expert_policy="sampling",
normalize_gate_prob_before_dropping=False,
moe_eval_capacity_token_fraction=0.25,
batch_prioritized_routing=False,
use_xmoe=False,
) -> None:
super().__init__()
if not use_xmoe:
self.wg = torch.nn.Linear(model_dim, num_experts, bias=False)
else:
self.wg_reduction = torch.nn.Linear(model_dim, 16, bias=False)
wg = torch.empty(num_experts, 16)
torch.nn.init.orthogonal_(wg, gain=0.32)
self.register_parameter("wg", torch.nn.Parameter(wg))
self.use_fp32 = use_fp32
self.second_expert_policy = second_expert_policy
self.normalize_gate_prob_before_dropping = normalize_gate_prob_before_dropping
self.moe_eval_capacity_token_fraction = moe_eval_capacity_token_fraction
self.batch_prioritized_routing = batch_prioritized_routing
self.use_xmoe = use_xmoe
def forward(self, input, mask=None): # type: ignore
if self.use_xmoe:
input = self.wg_reduction(input)
with torch.no_grad():
wg_norm = self.wg.norm(p=2.0, dim=1, keepdim=True)
self.wg.mul_(1.5 / wg_norm)
logits = self._cosine(input, self.wg)
logits = self._make_finite(logits)
else:
logits = self.wg(input)
return top2gating(
logits,
mask,
use_fp32=self.use_fp32,
second_expert_policy=self.second_expert_policy,
normalize_gate_prob_before_dropping=self.normalize_gate_prob_before_dropping,
eval_mode=not self.training,
moe_eval_capacity_token_fraction=self.moe_eval_capacity_token_fraction,
batch_prioritized_routing=self.batch_prioritized_routing,
)
def _cosine(self, mat1, mat2, eps=1e-4):
assert mat1.dim() == 2
assert mat2.dim() == 2
# mat1 = F.normalize(mat1, p=2.0, dim=1, eps=eps)
mat2 = F.normalize(mat2.float(), p=2.0, dim=1, eps=eps)
return mat1.float().matmul(mat2.transpose(0, 1)).type_as(mat1)
def _make_finite(self, scores):
ok = scores.isfinite()
if not ok.all():
# NaNs here can break the assignment algorithm
scores[~ok] = scores[ok].min()
return scores | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/component/xmoe/routing.py | routing.py |
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
# NOTE: This is a mirror of the code in
# https://github.com/facebookresearch/fairscale/tree/master/fairscale/nn/moe
import logging
import time
from typing import Any, Tuple, cast
import torch
import torch.distributed as dist
from torch import Tensor
from torch.nn import Module, ModuleList
try:
from fairseq.modules.moe import MOELayer
has_fairseq = True
Base = MOELayer
except ModuleNotFoundError:
Base = Module
has_fairseq = False
try:
# To enable Tutel MoE optimizations:
# python3 -m pip install --user --upgrade git+https://github.com/microsoft/tutel@v0.1.x
from tutel import moe as tutel_moe
has_tutel, fused_cumsum_sub_one = True, tutel_moe.fast_cumsum_sub_one
except ModuleNotFoundError:
has_tutel, fused_cumsum_sub_one = False, lambda mask: torch.cumsum(mask, dim=0) - 1
logger = logging.getLogger(__name__)
# einsum dimensions: (g)roup, (s)equence, (e)xpert, (m)odel, (c)apacity
# See https://arxiv.org/pdf/2006.16668.pdf for details.
# Based on https://github.com/pytorch/pytorch/pull/40762
class _AllToAll(torch.autograd.Function):
@staticmethod
def forward(ctx: Any, group: dist.ProcessGroup, input: Tensor) -> Tensor: # type: ignore
ctx.group = group
input = input.contiguous()
output = torch.empty_like(input)
if torch.distributed.is_initialized():
dist.all_to_all_single(output, input, group=group)
else:
assert group is None
output = input
return output
@staticmethod
def backward(ctx: Any, *grad_output: Tensor) -> Tuple[None, Tensor]:
return (None, _AllToAll.apply(ctx.group, *grad_output))
def _find_my_group_index(grouped_ranks):
my_rank = dist.get_rank()
for i, group in enumerate(grouped_ranks):
if my_rank in group:
return i
raise RuntimeError
def get_moe_group(moe_expert_count):
if dist.is_initialized():
if not hasattr(get_moe_group, "_moe_groups"):
world_size = dist.get_world_size()
if world_size <= moe_expert_count:
assert moe_expert_count % world_size == 0
moe_groups = [[i] for i in range(world_size)]
else:
assert world_size % moe_expert_count == 0
ranks_per_group = world_size // moe_expert_count
moe_groups = [
[i + j * moe_expert_count for j in range(ranks_per_group)]
for i in range(moe_expert_count)
]
get_moe_group._moe_group_idx = moe_groups
get_moe_group._moe_groups = [dist.new_group(g) for g in moe_groups]
my_group_idx = _find_my_group_index(get_moe_group._moe_group_idx)
return get_moe_group._moe_groups[my_group_idx]
def get_all2all_group(moe_expert_count):
if dist.is_initialized():
if not hasattr(get_all2all_group, "_all2all_groups"):
world_size = dist.get_world_size()
# more experts than world size
if world_size <= moe_expert_count:
assert moe_expert_count % world_size == 0
all2all_groups = [[i for i in range(world_size)]]
# larger world than num experts
else:
assert world_size % moe_expert_count == 0
ranks_per_group = world_size // moe_expert_count
all2all_groups = [
[i * moe_expert_count + j for j in range(moe_expert_count)]
for i in range(ranks_per_group)
]
get_all2all_group._all2all_group_idx = all2all_groups
get_all2all_group._all2all_groups = [
dist.new_group(g) for g in all2all_groups
]
my_group_idx = _find_my_group_index(get_all2all_group._all2all_group_idx)
return get_all2all_group._all2all_groups[my_group_idx]
class MOELayer(Base):
"""MOELayer module which implements MixtureOfExperts as described in Gshard_.
::
gate = Top2Gate(model_dim, num_experts)
moe = MOELayer(gate, expert)
        output, l_aux = moe(input)
.. Gshard_: https://arxiv.org/pdf/2006.16668.pdf
Args:
gate (torch.nn.Module):
gate network
expert (torch.nn.Module):
expert network
"""
def __init__(self, gate, experts, args):
if has_fairseq:
super(Base, self).__init__()
else:
super().__init__()
self.gate = gate
if type(experts) == ModuleList:
self.experts = cast(ModuleList, experts)
else:
self.experts = ModuleList([experts])
self.expert_group = get_moe_group(args.moe_expert_count)
self.all2all_group = get_all2all_group(args.moe_expert_count)
self.world_size = dist.get_world_size(group=self.expert_group)
self.all2all_size = dist.get_world_size(group=self.all2all_group)
for p in experts.parameters():
p.expert = True # type: ignore
self.num_local_experts = len(self.experts)
self.args = args
self.in_generation = False
self.a2a_cuda_event_intervals = []
self.a2a_cpu_time_ms = 0.0
def forward(self, *input: Tensor, input_padding_mask=None, **kwargs: Any) -> Tensor:
assert len(input) == 1, "only single input Tensor supported"
input = input[0]
assert (
len(input.shape) == 3
), "input Tensor must have dimensions: (s)equence, (t)oken, (m)odel"
if input_padding_mask is not None:
assert (
len(input_padding_mask.shape) == 2
), "input Tensor must have dimensions: (s)equence, (t)oken"
assert input_padding_mask.shape[0] == input.shape[0]
assert input_padding_mask.shape[1] == input.shape[1]
# assert input.shape[0] % len(self.experts) == 0, "num tokens must be order of number of local experts"
# Implement Algorithm 2 from GShard paper.
d_model = input.shape[2]
# Pad to expected batch size
input_shape = list(input.shape)
expected_bsz = (
getattr(self.args, "batch_size", 0)
if self.training
else getattr(self.args, "batch_size_valid", 0)
)
# This indicates that --batch-size or --max-sentences is not specified
if expected_bsz is None:
expected_bsz = 0
# Note: Padding is not necessary at generation time at present
# because all DDP workers process the same batch. Also, batch size at generation time
# can be different from that present in the checkpoint state
if (
not self.in_generation
and expected_bsz != 0
and input_shape[0] != expected_bsz
):
logger.warning(
f"padding batch with unexpected size {input_shape[0]} (expected: {expected_bsz})"
)
assert input_shape[0] < expected_bsz, f"{input_shape[0]} < {expected_bsz}"
padded_input = torch.zeros(
(expected_bsz, input_shape[1], input_shape[2]),
dtype=input.dtype,
layout=input.layout,
device=input.device,
)
padded_input[: input_shape[0], :, :] = input
input = padded_input
padded_input_padding_mask = torch.ones(
(
expected_bsz,
input_shape[1],
),
dtype=torch.bool,
device=input.device,
)
if input_padding_mask is not None:
padded_input_padding_mask[: input_shape[0], :] = input_padding_mask
else:
padded_input_padding_mask[: input_shape[0], :] = False
input_padding_mask = padded_input_padding_mask
# Reshape into S tokens by dropping sequence dimension.
reshaped_input = input.reshape(-1, d_model)
reshaped_input_shape = reshaped_input.shape
reshaped_input_padding_mask = (
input_padding_mask.reshape(-1) if input_padding_mask is not None else None
)
# Doing padding here when --max-tokens is specified and not --batch-size or --max-sentences
# Pro of --max-tokens: more flexible for MT variable sequence lengths
# Con of --max-tokens: extra all-reduce needed to figure out optimal padding without running OOM
if expected_bsz == 0:
expected_dim = reshaped_input_shape[0] * torch.ones(
(1,), dtype=torch.long, device=input.device
)
dist.all_reduce(expected_dim, group=dist.group.WORLD, op=dist.ReduceOp.MAX)
expected_dim = int(expected_dim.item())
padded_input = torch.zeros(
(expected_dim, reshaped_input_shape[1]),
dtype=input.dtype,
layout=input.layout,
device=input.device,
)
padded_input[: reshaped_input_shape[0], :] = reshaped_input
reshaped_input = padded_input
padded_input_padding_mask = torch.ones(
(expected_dim,), dtype=torch.bool, device=padded_input.device
)
if reshaped_input_padding_mask is not None:
padded_input_padding_mask[
: reshaped_input_shape[0]
] = reshaped_input_padding_mask
else:
padded_input_padding_mask[: reshaped_input_shape[0]] = False
reshaped_input_padding_mask = padded_input_padding_mask
if has_tutel:
l_aux, self.metadata, C, E, indices_, locations_, gates_ = self.gate(
reshaped_input, reshaped_input_padding_mask
)
S, M = reshaped_input.size(0), reshaped_input.size(1)
if not hasattr(self, "_tutel_dispatcher"):
self._tutel_dispatcher = tutel_moe.fast_dispatcher(
E, C, M, dispatch_dtype=reshaped_input.dtype
)
self._tutel_dispatcher.update(indices_, locations_, gates_, capacity=C)
dispatched_input = self._tutel_dispatcher.encode(reshaped_input)
else:
l_aux, combine_weights, dispatch_mask, self.metadata = self.gate(
reshaped_input, reshaped_input_padding_mask
)
dispatch_mask = dispatch_mask.to(input.dtype).permute(
1, 2, 0
) # S,E,C -> E,C,S
E, C, S = dispatch_mask.size()
M = reshaped_input.size(1)
assert reshaped_input.size() == (S, M)
# einsum("sec,sm->ecm")
dispatched_input = torch.mm(
dispatch_mask.view(E * C, S), reshaped_input
) # -> (E*C),M
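            # Each row of dispatch_mask selects (with 0/1 weights) the token that
            # occupies capacity slot c of expert e, so the matmul gathers tokens
            # into a dense (num_experts * capacity, d_model) buffer.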
if self.all2all_size > 1:
dispatched_input = self.all_to_all_wrapper(dispatched_input)
# Re-shape after all-to-all: ecm -> gecm
dispatched_input = dispatched_input.reshape(
self.all2all_size, self.num_local_experts, -1, d_model
)
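        # After the exchange, each rank holds, for every peer rank, the capacity
        # slots that belong to its own local experts.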
chunks = dispatched_input.chunk(self.num_local_experts, dim=1)
expert_outputs = []
for chunk, expert in zip(chunks, self.experts):
expert_outputs += [expert(chunk)]
expert_output = torch.cat(expert_outputs, dim=1)
if self.all2all_size > 1:
expert_output = self.all_to_all_wrapper(expert_output)
# Re-shape back: gecm -> ecm
expert_output = expert_output.reshape(
self.all2all_size * self.num_local_experts, -1, d_model
)
if has_tutel:
combined_output = self._tutel_dispatcher.decode(
expert_output.view(E * C, M)
)
else:
# einsum("sec,ecm->sm")
combined_output = combine_weights.view(S, E * C).mm(
expert_output.view(E * C, M)
)
# Remove padding here when --max-tokens is specified and not --batch-size or --max-sentences
combined_output = combined_output[: reshaped_input_shape[0], :]
combined_output = combined_output.reshape(input.shape)
combined_output = combined_output[: input_shape[0], :, :]
self.record_all_to_all_stats()
return combined_output, l_aux
def prepare_for_inference_(self):
self.in_generation = True
def all_to_all_wrapper(self, input: Tensor):
dummy_a2a = getattr(self.args, "dummy_a2a", False)
if dummy_a2a:
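            # Debug/benchmark path: skip the actual all-to-all communication and
            # pass the local input straight through.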
input = input.contiguous()
output = input.detach().clone()
return input
# always record times, since it is not a lot of overhead
# if we do not log it we simply clear it off in record_all_to_all_stats
cuda_start = torch.cuda.Event(enable_timing=True)
cuda_end = torch.cuda.Event(enable_timing=True)
cpu_start = time.time() * 1000
cuda_start.record()
output = _AllToAll.apply(self.all2all_group, input)
cuda_end.record()
cpu_end = time.time() * 1000
self.a2a_cpu_time_ms += cpu_end - cpu_start
self.a2a_cuda_event_intervals.append((cuda_start, cuda_end))
return output
def record_all_to_all_stats(self):
# controlled via an argument as we want to minimize any impact from torch.cuda.synchronize()
record_a2a_perf_stats = getattr(self.args, "record_a2a_perf_stats", False)
if record_a2a_perf_stats:
torch.cuda.synchronize()
self.metadata["all_to_all_cpu_time_ms"] = self.a2a_cpu_time_ms
a2a_cuda_time_ms = 0.0
for ev_start, ev_end in self.a2a_cuda_event_intervals:
a2a_cuda_time_ms += ev_start.elapsed_time(ev_end)
self.metadata["all_to_all_cuda_time_ms"] = a2a_cuda_time_ms
# reset stats
self.a2a_cpu_time_ms = 0.0
self.a2a_cuda_event_intervals = [] | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/component/xmoe/moe_layer.py | moe_layer.py |
import math
import numpy as np
import torch
import torch.nn as nn
from fairscale.nn import checkpoint_wrapper, wrap
try:
from apex.normalization import FusedLayerNorm as LayerNorm
except ModuleNotFoundError:
from torch.nn import LayerNorm
from torchscale.architecture.utils import init_bert_params
from torchscale.component.droppath import DropPath
from torchscale.component.feedforward_network import FeedForwardNetwork, make_experts
from torchscale.component.multihead_attention import MultiheadAttention
from torchscale.component.multiway_network import MultiwayWrapper, set_split_position
from torchscale.component.relative_position_bias import RelativePositionBias
from torchscale.component.xmoe.moe_layer import MOELayer
from torchscale.component.xmoe.routing import Top1Gate, Top2Gate
class EncoderLayer(nn.Module):
def __init__(self, args, depth, is_moe_layer=False, is_encoder_decoder=False):
super().__init__()
self.args = args
self.embed_dim = args.encoder_embed_dim
self.self_attn = self.build_self_attention(self.embed_dim, args)
self.self_attn_layer_norm = MultiwayWrapper(args, LayerNorm(self.embed_dim, eps=args.layernorm_eps))
self.dropout_module = torch.nn.Dropout(args.dropout)
if args.drop_path_rate > 0:
drop_path_prob = np.linspace(0, args.drop_path_rate, args.encoder_layers)[
depth
]
self.drop_path = DropPath(drop_path_prob)
else:
self.drop_path = None
self.normalize_before = args.encoder_normalize_before
self.is_moe_layer = is_moe_layer
self.ffn_dim = args.encoder_ffn_embed_dim
if not self.is_moe_layer:
self.ffn = MultiwayWrapper(
args,
self.build_ffn(
self.embed_dim,
self.args,
),
)
else:
assert not self.args.multiway
if args.moe_top1_expert:
gate = Top1Gate(
self.embed_dim,
args.moe_expert_count,
use_fp32=args.moe_gating_use_fp32,
moe_eval_capacity_token_fraction=args.moe_eval_capacity_token_fraction,
use_xmoe=args.use_xmoe,
)
else:
gate = Top2Gate(
self.embed_dim,
args.moe_expert_count,
args.moe_gating_use_fp32,
args.moe_second_expert_policy,
args.moe_normalize_gate_prob_before_dropping,
args.moe_eval_capacity_token_fraction,
use_xmoe=args.use_xmoe,
)
experts = make_experts(args, self.embed_dim, self.ffn_dim)
self.moe_layer = MOELayer(gate, experts, args)
self.final_layer_norm = MultiwayWrapper(args, LayerNorm(self.embed_dim, eps=args.layernorm_eps))
if args.deepnorm:
if is_encoder_decoder:
self.alpha = (
math.pow(
math.pow(args.encoder_layers, 4) * args.decoder_layers, 0.0625
)
* 0.81
)
else:
self.alpha = math.pow(2.0 * args.encoder_layers, 0.25)
else:
self.alpha = 1.0
def build_ffn(self, embed_dim, args):
return FeedForwardNetwork(
embed_dim,
self.ffn_dim,
args.activation_fn,
args.dropout,
args.activation_dropout,
args.layernorm_eps,
args.subln,
)
def build_self_attention(self, embed_dim, args):
return MultiheadAttention(
args,
embed_dim,
args.encoder_attention_heads,
dropout=args.attention_dropout,
self_attention=True,
encoder_decoder_attention=False,
subln=args.subln,
)
def residual_connection(self, x, residual):
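        # DeepNorm-style residual: the residual branch is scaled by self.alpha
        # (1.0 unless args.deepnorm is set) to stabilize deep post-LN stacks.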
return residual * self.alpha + x
def forward(self, x, encoder_padding_mask, attn_mask=None, rel_pos=None, multiway_split_position=None, incremental_state=None):
if multiway_split_position is not None:
assert self.args.multiway
self.apply(set_split_position(multiway_split_position))
if attn_mask is not None:
attn_mask = attn_mask.masked_fill(attn_mask.to(torch.bool), -1e8)
residual = x
if self.normalize_before:
x = self.self_attn_layer_norm(x)
x, _ = self.self_attn(
query=x,
key=x,
value=x,
key_padding_mask=encoder_padding_mask,
attn_mask=attn_mask,
rel_pos=rel_pos,
incremental_state=incremental_state,
)
x = self.dropout_module(x)
if self.drop_path is not None:
x = self.drop_path(x)
x = self.residual_connection(x, residual)
if not self.normalize_before:
x = self.self_attn_layer_norm(x)
residual = x
if self.normalize_before:
x = self.final_layer_norm(x)
if not self.is_moe_layer:
x = self.ffn(x)
l_aux = None
else:
x = x.transpose(0, 1)
x, l_aux = self.moe_layer(x)
x = x.transpose(0, 1)
if self.drop_path is not None:
x = self.drop_path(x)
x = self.residual_connection(x, residual)
if not self.normalize_before:
x = self.final_layer_norm(x)
return x, l_aux
class Encoder(nn.Module):
def __init__(
self,
args,
embed_tokens=None,
embed_positions=None,
output_projection=None,
is_encoder_decoder=False,
**kwargs
):
self.args = args
super().__init__(**kwargs)
self.dropout_module = torch.nn.Dropout(args.dropout)
embed_dim = args.encoder_embed_dim
self.embed_scale = 1.0 if args.no_scale_embedding else math.sqrt(embed_dim)
self.embed_tokens = embed_tokens
self.embed_positions = embed_positions
if (
output_projection is None
and not is_encoder_decoder
and not args.no_output_layer
and args.vocab_size > 0
):
self.output_projection = self.build_output_projection(args)
else:
self.output_projection = output_projection
if args.layernorm_embedding:
self.layernorm_embedding = MultiwayWrapper(
args, LayerNorm(embed_dim, eps=args.layernorm_eps), dim=1
)
else:
self.layernorm_embedding = None
self.layers = nn.ModuleList([])
moe_freq = args.moe_freq
for i in range(args.encoder_layers):
is_moe_layer = moe_freq != 0 and (i + 1) % moe_freq == 0
self.layers.append(
self.build_encoder_layer(
args,
depth=i,
is_moe_layer=is_moe_layer,
is_encoder_decoder=is_encoder_decoder,
)
)
self.num_layers = len(self.layers)
if args.encoder_normalize_before and args.normalize_output:
self.layer_norm = MultiwayWrapper(args, LayerNorm(embed_dim, eps=args.layernorm_eps))
else:
self.layer_norm = None
if args.rel_pos_buckets > 0 and args.max_rel_pos > 0:
self.relative_position = RelativePositionBias(
num_buckets=args.rel_pos_buckets,
max_distance=args.max_rel_pos,
n_heads=args.encoder_attention_heads,
)
else:
self.relative_position = None
if args.bert_init:
self.apply(init_bert_params)
if args.deepnorm:
if is_encoder_decoder:
init_scale = (
math.pow(
math.pow(args.encoder_layers, 4) * args.decoder_layers, 0.0625
)
/ 1.15
)
else:
init_scale = math.pow(8.0 * args.encoder_layers, 0.25)
for name, p in self.named_parameters():
if (
"fc1" in name
or "fc2" in name
or "out_proj" in name
or "v_proj" in name
):
p.data.div_(init_scale)
if args.subln:
if is_encoder_decoder:
init_scale = math.sqrt(
math.log(3 * args.decoder_layers)
* math.log(2 * args.encoder_layers)
/ 3
)
else:
init_scale = math.sqrt(math.log(args.encoder_layers * 2))
for name, p in self.named_parameters():
if (
"fc1" in name
or "fc2" in name
or "out_proj" in name
or "v_proj" in name
):
p.data.mul_(init_scale)
def build_output_projection(
self,
args,
):
if args.share_encoder_input_output_embed:
assert args.encoder_embedding_type == "language"
output_projection = torch.nn.Linear(
self.embed_tokens.weight.shape[1],
self.embed_tokens.weight.shape[0],
bias=False,
)
output_projection.weight = self.embed_tokens.weight
else:
output_projection = torch.nn.Linear(
args.encoder_embed_dim, args.vocab_size, bias=False
)
torch.nn.init.normal_(
output_projection.weight, mean=0, std=args.encoder_embed_dim**-0.5
)
return output_projection
def build_encoder_layer(
self, args, depth, is_moe_layer=False, is_encoder_decoder=False
):
layer = EncoderLayer(
args,
depth,
is_moe_layer=is_moe_layer,
is_encoder_decoder=is_encoder_decoder,
)
if args.checkpoint_activations:
layer = checkpoint_wrapper(layer)
if args.fsdp:
layer = wrap(layer)
return layer
def forward_embedding(
self,
src_tokens,
token_embedding=None,
positions=None,
):
if token_embedding is None:
token_embedding = self.embed_tokens(src_tokens)
x = embed = self.embed_scale * token_embedding
if self.embed_positions is not None:
if src_tokens is not None:
x = embed + self.embed_positions(src_tokens, positions=positions)
else:
x = embed + self.embed_positions(x, positions=positions)
if self.layernorm_embedding is not None:
x = self.layernorm_embedding(x)
x = self.dropout_module(x)
return x, embed
def forward(
self,
src_tokens,
encoder_padding_mask=None,
attn_mask=None,
return_all_hiddens=False,
token_embeddings=None,
multiway_split_position=None,
features_only=False,
incremental_state=None,
positions=None,
**kwargs
):
assert src_tokens is not None or token_embeddings is not None
if encoder_padding_mask is None:
if src_tokens is not None:
encoder_padding_mask = torch.zeros_like(
src_tokens, device=src_tokens.device
).bool()
else:
encoder_padding_mask = torch.zeros(
[token_embeddings.size(0), token_embeddings.size(1)],
device=token_embeddings.device,
).bool()
if multiway_split_position is not None:
assert self.args.multiway
self.apply(set_split_position(multiway_split_position))
x, encoder_embedding = self.forward_embedding(src_tokens, token_embeddings, positions)
x = x * (1 - encoder_padding_mask.unsqueeze(-1).type_as(x))
encoder_states = []
if return_all_hiddens:
encoder_states.append(x)
rel_pos_bias = None
if self.relative_position is not None:
rel_pos_bias = self.relative_position(
batch_size=x.size(0), qlen=x.size(1), klen=x.size(1)
)
# incremental_state is not None during inference if we use the bidirectional encoder as a generator as in s2s-ft (https://arxiv.org/abs/2110.13640)
l_aux = []
for idx, layer in enumerate(self.layers):
x, l_aux_i = layer(
x,
encoder_padding_mask=encoder_padding_mask if incremental_state is None else None,
attn_mask=attn_mask,
rel_pos=rel_pos_bias,
multiway_split_position=multiway_split_position,
incremental_state=incremental_state[idx] if incremental_state is not None else None,
)
if return_all_hiddens:
assert encoder_states is not None
encoder_states.append(x)
l_aux.append(l_aux_i)
if self.layer_norm is not None:
x = self.layer_norm(x)
if not features_only and self.output_projection is not None:
x = self.output_projection(x)
return {
"encoder_out": x,
"encoder_embedding": encoder_embedding,
"encoder_padding_mask": encoder_padding_mask,
"encoder_states": encoder_states,
"l_aux": l_aux,
} | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/architecture/encoder.py | encoder.py |
class EncoderConfig(object):
def __init__(self, **kwargs):
self.encoder_embed_dim = kwargs.pop("encoder_embed_dim", 768)
self.encoder_attention_heads = kwargs.pop("encoder_attention_heads", 12)
self.encoder_ffn_embed_dim = kwargs.pop("encoder_ffn_embed_dim", 3072)
self.encoder_layers = kwargs.pop("encoder_layers", 12)
self.encoder_normalize_before = kwargs.pop("encoder_normalize_before", True)
self.normalize_output = kwargs.pop("normalize_output", True)
self.activation_fn = kwargs.pop("activation_fn", "gelu")
# self.activation_fn = kwargs.pop("activation_fn", "flashattention")
self.dropout = kwargs.pop("dropout", 0.0)
self.drop_path_rate = kwargs.pop("drop_path_rate", 0.0)
self.attention_dropout = kwargs.pop("attention_dropout", 0.0)
self.activation_dropout = kwargs.pop("activation_dropout", 0.0)
self.no_scale_embedding = kwargs.pop("no_scale_embedding", True)
self.layernorm_embedding = kwargs.pop("layernorm_embedding", False)
self.moe_freq = kwargs.pop("moe_freq", 0)
self.moe_top1_expert = kwargs.pop("moe_top1_expert", False)
self.moe_expert_count = kwargs.pop("moe_expert_count", 0)
self.moe_gating_use_fp32 = kwargs.pop("moe_gating_use_fp32", True)
self.moe_eval_capacity_token_fraction = kwargs.pop(
"moe_eval_capacity_token_fraction", 0.25
)
self.moe_second_expert_policy = kwargs.pop("moe_second_expert_policy", "random")
self.moe_normalize_gate_prob_before_dropping = kwargs.pop(
"moe_normalize_gate_prob_before_dropping", False
)
self.use_xmoe = kwargs.pop("use_xmoe", False)
self.rel_pos_buckets = kwargs.pop("rel_pos_buckets", 0)
self.max_rel_pos = kwargs.pop("max_rel_pos", 0)
self.deepnorm = kwargs.pop("deepnorm", False)
self.subln = kwargs.pop("subln", True)
self.bert_init = kwargs.pop("bert_init", False)
self.multiway = kwargs.pop("multiway", False)
self.share_encoder_input_output_embed = kwargs.pop(
"share_encoder_input_output_embed", False
)
self.max_source_positions = kwargs.pop("max_source_positions", 1024)
self.no_output_layer = kwargs.pop("no_output_layer", False)
self.layernorm_eps = kwargs.pop("layernorm_eps", 1e-5)
# Text
self.vocab_size = kwargs.pop("vocab_size", -1)
# Vision
self.img_size = kwargs.pop("img_size", 224)
self.patch_size = kwargs.pop("patch_size", 16)
self.in_chans = kwargs.pop("in_chans", 3)
# Fairscale
self.checkpoint_activations = kwargs.pop("checkpoint_activations", False)
self.fsdp = kwargs.pop("fsdp", False)
self.ddp_rank = kwargs.pop("ddp_rank", 0)
self.xpos_rel_pos = kwargs.pop("xpos_rel_pos", False)
self.xpos_scale_base = kwargs.pop("xpos_scale_base", 512)
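        # DeepNorm (post-LN) and sub-LN (pre-LN) are mutually exclusive; when
        # both are requested, deepnorm takes precedence and disables subln.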
if self.deepnorm:
self.encoder_normalize_before = False
self.subln = False
if self.subln:
self.encoder_normalize_before = True
self.deepnorm = False
if self.use_xmoe:
self.moe_normalize_gate_prob_before_dropping = True
self.moe_second_expert_policy = "random"
assert self.moe_freq > 0 and self.moe_expert_count > 0
def override(self, args):
for hp in self.__dict__.keys():
if getattr(args, hp, None) is not None:
self.__dict__[hp] = getattr(args, hp, None)
class DecoderConfig(object):
def __init__(self, **kwargs):
self.decoder_embed_dim = kwargs.pop("decoder_embed_dim", 768)
self.decoder_attention_heads = kwargs.pop("decoder_attention_heads", 12)
self.decoder_ffn_embed_dim = kwargs.pop("decoder_ffn_embed_dim", 3072)
self.decoder_layers = kwargs.pop("decoder_layers", 12)
self.decoder_normalize_before = kwargs.pop("decoder_normalize_before", True)
self.activation_fn = kwargs.pop("activation_fn", "gelu")
self.dropout = kwargs.pop("dropout", 0.0)
self.drop_path_rate = kwargs.pop("drop_path_rate", 0.0)
self.attention_dropout = kwargs.pop("attention_dropout", 0.0)
self.activation_dropout = kwargs.pop("activation_dropout", 0.0)
self.no_scale_embedding = kwargs.pop("no_scale_embedding", True)
self.layernorm_embedding = kwargs.pop("layernorm_embedding", False)
self.moe_freq = kwargs.pop("moe_freq", 0)
self.moe_top1_expert = kwargs.pop("moe_top1_expert", False)
self.moe_expert_count = kwargs.pop("moe_expert_count", 0)
self.moe_gating_use_fp32 = kwargs.pop("moe_gating_use_fp32", True)
self.moe_eval_capacity_token_fraction = kwargs.pop(
"moe_eval_capacity_token_fraction", 0.25
)
self.moe_second_expert_policy = kwargs.pop("moe_second_expert_policy", "random")
self.moe_normalize_gate_prob_before_dropping = kwargs.pop(
"moe_normalize_gate_prob_before_dropping", False
)
self.use_xmoe = kwargs.pop("use_xmoe", False)
self.rel_pos_buckets = kwargs.pop("rel_pos_buckets", 0)
self.max_rel_pos = kwargs.pop("max_rel_pos", 0)
self.deepnorm = kwargs.pop("deepnorm", False)
self.subln = kwargs.pop("subln", True)
self.bert_init = kwargs.pop("bert_init", False)
self.multiway = kwargs.pop("multiway", False)
self.share_decoder_input_output_embed = kwargs.pop(
"share_decoder_input_output_embed", False
)
self.max_target_positions = kwargs.pop("max_target_positions", 1024)
self.no_output_layer = kwargs.pop("no_output_layer", False)
self.layernorm_eps = kwargs.pop("layernorm_eps", 1e-5)
# Text
self.vocab_size = kwargs.pop("vocab_size", -1)
# Fairscale
self.checkpoint_activations = kwargs.pop("checkpoint_activations", False)
self.fsdp = kwargs.pop("fsdp", False)
self.ddp_rank = kwargs.pop("ddp_rank", 0)
self.xpos_rel_pos = kwargs.pop("xpos_rel_pos", False)
self.xpos_scale_base = kwargs.pop("xpos_scale_base", 512)
if self.deepnorm:
self.decoder_normalize_before = False
self.subln = False
if self.subln:
self.decoder_normalize_before = True
self.deepnorm = False
if self.use_xmoe:
self.moe_normalize_gate_prob_before_dropping = True
self.moe_second_expert_policy = "random"
assert self.moe_freq > 0 and self.moe_expert_count > 0
def override(self, args):
for hp in self.__dict__.keys():
if getattr(args, hp, None) is not None:
self.__dict__[hp] = getattr(args, hp, None)
class EncoderDecoderConfig(object):
def __init__(self, **kwargs):
self.encoder_embed_dim = kwargs.pop("encoder_embed_dim", 768)
self.encoder_attention_heads = kwargs.pop("encoder_attention_heads", 12)
self.encoder_ffn_embed_dim = kwargs.pop("encoder_ffn_embed_dim", 3072)
self.encoder_layers = kwargs.pop("encoder_layers", 12)
self.encoder_normalize_before = kwargs.pop("encoder_normalize_before", True)
self.decoder_embed_dim = kwargs.pop("decoder_embed_dim", 768)
self.decoder_attention_heads = kwargs.pop("decoder_attention_heads", 12)
self.decoder_ffn_embed_dim = kwargs.pop("decoder_ffn_embed_dim", 3072)
self.decoder_layers = kwargs.pop("decoder_layers", 12)
self.decoder_normalize_before = kwargs.pop("decoder_normalize_before", True)
self.activation_fn = kwargs.pop("activation_fn", "gelu")
self.dropout = kwargs.pop("dropout", 0.0)
self.drop_path_rate = kwargs.pop("drop_path_rate", 0.0)
self.attention_dropout = kwargs.pop("attention_dropout", 0.0)
self.activation_dropout = kwargs.pop("activation_dropout", 0.0)
self.no_scale_embedding = kwargs.pop("no_scale_embedding", True)
self.layernorm_embedding = kwargs.pop("layernorm_embedding", False)
self.moe_freq = kwargs.pop("moe_freq", 0)
self.moe_top1_expert = kwargs.pop("moe_top1_expert", False)
self.moe_expert_count = kwargs.pop("moe_expert_count", 0)
self.moe_gating_use_fp32 = kwargs.pop("moe_gating_use_fp32", True)
self.moe_eval_capacity_token_fraction = kwargs.pop(
"moe_eval_capacity_token_fraction", 0.25
)
self.moe_second_expert_policy = kwargs.pop("moe_second_expert_policy", "random")
self.moe_normalize_gate_prob_before_dropping = kwargs.pop(
"moe_normalize_gate_prob_before_dropping", False
)
self.use_xmoe = kwargs.pop("use_xmoe", False)
self.rel_pos_buckets = kwargs.pop("rel_pos_buckets", 0)
self.max_rel_pos = kwargs.pop("max_rel_pos", 0)
self.deepnorm = kwargs.pop("deepnorm", False)
self.subln = kwargs.pop("subln", True)
self.bert_init = kwargs.pop("bert_init", False)
self.multiway = kwargs.pop("multiway", False)
self.share_all_embeddings = kwargs.pop("share_all_embeddings", False)
self.share_decoder_input_output_embed = kwargs.pop(
"share_decoder_input_output_embed", False
)
self.max_source_positions = kwargs.pop("max_source_positions", 1024)
self.max_target_positions = kwargs.pop("max_target_positions", 1024)
self.no_output_layer = kwargs.pop("no_output_layer", False)
self.layernorm_eps = kwargs.pop("layernorm_eps", 1e-5)
# Text
self.vocab_size = kwargs.pop("vocab_size", -1)
# Fairscale
self.checkpoint_activations = kwargs.pop("checkpoint_activations", False)
self.fsdp = kwargs.pop("fsdp", False)
self.ddp_rank = kwargs.pop("ddp_rank", 0)
self.xpos_rel_pos = kwargs.pop("xpos_rel_pos", False)
self.xpos_scale_base = kwargs.pop("xpos_scale_base", 512)
if self.deepnorm:
self.encoder_normalize_before = False
self.decoder_normalize_before = False
self.subln = False
if self.subln:
self.encoder_normalize_before = True
self.decoder_normalize_before = True
self.deepnorm = False
if self.use_xmoe:
self.moe_normalize_gate_prob_before_dropping = True
self.moe_second_expert_policy = "random"
assert self.moe_freq > 0 and self.moe_expert_count > 0
def override(self, args):
for hp in self.__dict__.keys():
if getattr(args, hp, None) is not None:
self.__dict__[hp] = getattr(args, hp, None) | APAC-SCALE | /APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/architecture/config.py | config.py |
import math
import numpy as np
import torch
import torch.nn as nn
from fairscale.nn import checkpoint_wrapper, wrap
from torchscale.architecture.utils import init_bert_params
from torchscale.component.droppath import DropPath
from torchscale.component.feedforward_network import FeedForwardNetwork, make_experts
from torchscale.component.multihead_attention import MultiheadAttention
from torchscale.component.relative_position_bias import RelativePositionBias
from torchscale.component.xmoe.moe_layer import MOELayer
from torchscale.component.xmoe.routing import Top1Gate, Top2Gate
try:
from apex.normalization import FusedLayerNorm as LayerNorm
except ModuleNotFoundError:
from torch.nn import LayerNorm
class DecoderLayer(nn.Module):
def __init__(
self,
args,
depth,
is_moe_layer=False,
is_encoder_decoder=False,
):
super().__init__()
self.args = args
self.embed_dim = args.decoder_embed_dim
self.dropout_module = torch.nn.Dropout(args.dropout)
if args.drop_path_rate > 0:
drop_path_prob = np.linspace(0, args.drop_path_rate, args.decoder_layers)[
depth
]
self.drop_path = DropPath(drop_path_prob)
else:
self.drop_path = None
self.self_attn = self.build_self_attention(self.embed_dim, args)
self.normalize_before = args.decoder_normalize_before
self.self_attn_layer_norm = LayerNorm(self.embed_dim, eps=args.layernorm_eps)
if not is_encoder_decoder:
self.encoder_attn = None
self.encoder_attn_layer_norm = None
else:
self.encoder_attn = self.build_encoder_attention(self.embed_dim, args)
self.encoder_attn_layer_norm = LayerNorm(self.embed_dim, eps=args.layernorm_eps)
self.is_moe_layer = is_moe_layer
self.ffn_dim = args.decoder_ffn_embed_dim
if not self.is_moe_layer:
self.ffn = self.build_ffn(
self.embed_dim,
self.args,
)
else:
if args.moe_top1_expert:
gate = Top1Gate(
self.embed_dim,
args.moe_expert_count,
use_fp32=args.moe_gating_use_fp32,
moe_eval_capacity_token_fraction=args.moe_eval_capacity_token_fraction,
use_xmoe=args.use_xmoe,
)
else:
gate = Top2Gate(
self.embed_dim,
args.moe_expert_count,
args.moe_gating_use_fp32,
args.moe_second_expert_policy,
args.moe_normalize_gate_prob_before_dropping,
args.moe_eval_capacity_token_fraction,
use_xmoe=args.use_xmoe,
)
experts = make_experts(args, self.embed_dim, self.ffn_dim)
self.moe_layer = MOELayer(gate, experts, args)
self.final_layer_norm = LayerNorm(self.embed_dim, eps=args.layernorm_eps)
if args.deepnorm:
if is_encoder_decoder:
self.alpha = math.pow(3.0 * args.decoder_layers, 0.25)
else:
self.alpha = math.pow(2.0 * args.decoder_layers, 0.25)
else:
self.alpha = 1.0
def build_ffn(self, embed_dim, args):
return FeedForwardNetwork(
embed_dim,
self.ffn_dim,
args.activation_fn,
args.dropout,
args.activation_dropout,
args.layernorm_eps,
args.subln,
)
def build_self_attention(self, embed_dim, args):
return MultiheadAttention(
args,
embed_dim,
args.decoder_attention_heads,
dropout=args.attention_dropout,
self_attention=True,
encoder_decoder_attention=False,
subln=args.subln,
)
def build_encoder_attention(self, embed_dim, args):
return MultiheadAttention(
args,
embed_dim,
args.decoder_attention_heads,
dropout=args.attention_dropout,
self_attention=False,
encoder_decoder_attention=True,
subln=args.subln,
)
def residual_connection(self, x, residual):
return residual * self.alpha + x
def forward(
self,
x,
encoder_out=None,
encoder_padding_mask=None,
incremental_state=None,
self_attn_mask=None,
self_attn_padding_mask=None,
self_attn_rel_pos=None,
cross_attn_rel_pos=None,
):
residual = x
if self.normalize_before:
x = self.self_attn_layer_norm(x)
x, attn = self.self_attn(
query=x,
key=x,
value=x,
key_padding_mask=self_attn_padding_mask,
incremental_state=incremental_state,
attn_mask=self_attn_mask,
rel_pos=self_attn_rel_pos,
)
x = self.dropout_module(x)
if self.drop_path is not None:
x = self.drop_path(x)
x = self.residual_connection(x, residual)
if not self.normalize_before:
x = self.self_attn_layer_norm(x)
if self.encoder_attn is not None and encoder_out is not None:
residual = x
if self.normalize_before:
x = self.encoder_attn_layer_norm(x)
x, attn = self.encoder_attn(
query=x,
key=encoder_out,
value=encoder_out,
key_padding_mask=encoder_padding_mask,
incremental_state=None,
rel_pos=cross_attn_rel_pos,
)
x = self.dropout_module(x)
if self.drop_path is not None:
x = self.drop_path(x)
x = self.residual_connection(x, residual)
if not self.normalize_before:
x = self.encoder_attn_layer_norm(x)
residual = x
if self.normalize_before:
x = self.final_layer_norm(x)
if not self.is_moe_layer:
x = self.ffn(x)
l_aux = None
else:
x, l_aux = self.moe_layer(x)
if self.drop_path is not None:
x = self.drop_path(x)
x = self.residual_connection(x, residual)
if not self.normalize_before:
x = self.final_layer_norm(x)
return x, attn, None, l_aux
class Decoder(nn.Module):
def __init__(
self,
args,
embed_tokens=None,
embed_positions=None,
output_projection=None,
is_encoder_decoder=False,
**kwargs
):
super().__init__(**kwargs)
self.args = args
self.dropout_module = torch.nn.Dropout(args.dropout)
embed_dim = args.decoder_embed_dim
self.embed_dim = embed_dim
self.embed_scale = 1.0 if args.no_scale_embedding else math.sqrt(embed_dim)
self.embed_tokens = embed_tokens
self.embed_positions = embed_positions
if (
output_projection is None
and not args.no_output_layer
and args.vocab_size > 0
):
self.output_projection = self.build_output_projection(args)
else:
self.output_projection = output_projection
if args.layernorm_embedding:
self.layernorm_embedding = LayerNorm(embed_dim, eps=args.layernorm_eps)
else:
self.layernorm_embedding = None
self.layers = nn.ModuleList([])
moe_freq = args.moe_freq
for i in range(args.decoder_layers):
is_moe_layer = moe_freq != 0 and (i + 1) % moe_freq == 0
self.layers.append(
self.build_decoder_layer(
args,
depth=i,
is_moe_layer=is_moe_layer,
is_encoder_decoder=is_encoder_decoder,
)
)
self.num_layers = len(self.layers)
if args.decoder_normalize_before:
self.layer_norm = LayerNorm(embed_dim, eps=args.layernorm_eps)
else:
self.layer_norm = None
self.self_attn_relative_position = None
self.cross_attn_relative_position = None
if args.rel_pos_buckets > 0 and args.max_rel_pos > 0:
self.self_attn_relative_position = RelativePositionBias(
num_buckets=args.rel_pos_buckets,
max_distance=args.max_rel_pos,
n_heads=args.decoder_attention_heads,
)
if is_encoder_decoder:
self.cross_attn_relative_position = RelativePositionBias(
num_buckets=args.rel_pos_buckets,
max_distance=args.max_rel_pos,
n_heads=args.decoder_attention_heads,
)
if args.bert_init:
self.apply(init_bert_params)
if args.deepnorm:
if is_encoder_decoder:
init_scale = math.pow(12.0 * args.decoder_layers, 0.25)
else:
init_scale = math.pow(8.0 * args.decoder_layers, 0.25)
for name, p in self.named_parameters():
if (
"fc1" in name
or "fc2" in name
or "out_proj" in name
or "v_proj" in name
):
p.data.div_(init_scale)
if args.subln:
if is_encoder_decoder:
init_scale = math.sqrt(math.log(args.decoder_layers * 3))
else:
init_scale = math.sqrt(math.log(args.decoder_layers * 2))
for name, p in self.named_parameters():
if "encoder_attn" in name:
continue
if (
"fc1" in name
or "fc2" in name
or "out_proj" in name
or "v_proj" in name
):
p.data.mul_(init_scale)
def build_output_projection(
self,
args,
):
if args.share_decoder_input_output_embed:
output_projection = torch.nn.Linear(
self.embed_tokens.weight.shape[1],
self.embed_tokens.weight.shape[0],
bias=False,
)
output_projection.weight = self.embed_tokens.weight
else:
output_projection = torch.nn.Linear(
args.decoder_embed_dim, args.vocab_size, bias=False
)
torch.nn.init.normal_(
output_projection.weight, mean=0, std=args.decoder_embed_dim**-0.5
)
return output_projection
def build_decoder_layer(
self, args, depth, is_moe_layer=False, is_encoder_decoder=False
):
layer = DecoderLayer(
args,
depth,
is_moe_layer=is_moe_layer,
is_encoder_decoder=is_encoder_decoder,
)
if args.checkpoint_activations:
layer = checkpoint_wrapper(layer)
if args.fsdp:
layer = wrap(layer)
return layer
def forward_embedding(
self,
tokens,
token_embedding=None,
incremental_state=None,
):
positions = None
if self.embed_positions is not None:
positions = self.embed_positions(
tokens, incremental_state=incremental_state
)
if incremental_state is not None:
tokens = tokens[:, -1:]
if positions is not None:
positions = positions[:, -1:]
if token_embedding is None:
token_embedding = self.embed_tokens(tokens)
x = embed = self.embed_scale * token_embedding
if positions is not None:
x += positions
if self.layernorm_embedding is not None:
x = self.layernorm_embedding(x)
x = self.dropout_module(x)
return x, embed
def forward(
self,
prev_output_tokens,
self_attn_padding_mask=None,
encoder_out=None,
incremental_state=None,
features_only=False,
return_all_hiddens=False,
token_embeddings=None,
**kwargs
):
# embed tokens and positions
x, _ = self.forward_embedding(
prev_output_tokens, token_embeddings, incremental_state
)
# relative position
self_attn_rel_pos_bias = None
slen = prev_output_tokens.size(1)
if self.self_attn_relative_position is not None:
self_attn_rel_pos_bias = self.self_attn_relative_position(
batch_size=x.size(0), qlen=slen, klen=slen
)
if incremental_state is not None:
self_attn_rel_pos_bias = self_attn_rel_pos_bias[-1:, :, :]
cross_attn_rel_pos_bias = None
if self.cross_attn_relative_position is not None:
cross_attn_rel_pos_bias = self.cross_attn_relative_position(
batch_size=x.size(0),
qlen=slen,
klen=encoder_out["encoder_out"].size(1),
)
if incremental_state is not None:
cross_attn_rel_pos_bias = cross_attn_rel_pos_bias[-1:, :, :]
# decoder layers
inner_states = [x]
if encoder_out is None:
l_aux = []
else:
l_aux = encoder_out["l_aux"] if "l_aux" in encoder_out else []
for idx, layer in enumerate(self.layers):
if incremental_state is None:
self_attn_mask = torch.triu(
torch.zeros([x.size(1), x.size(1)])
.float()
.fill_(float("-inf"))
.type_as(x),
1,
)
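                # Upper-triangular -inf mask (excluding the diagonal): position i
                # may only attend to positions <= i, enforcing causal decoding.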
else:
self_attn_mask = None
if idx not in incremental_state:
incremental_state[idx] = {}
x, layer_attn, _, l_aux_i = layer(
x,
encoder_out["encoder_out"] if encoder_out is not None else None,
encoder_out["encoder_padding_mask"]
if encoder_out is not None
else None,
incremental_state[idx] if incremental_state is not None else None,
self_attn_mask=self_attn_mask,
self_attn_padding_mask=self_attn_padding_mask,
self_attn_rel_pos=self_attn_rel_pos_bias,
cross_attn_rel_pos=cross_attn_rel_pos_bias,
)
l_aux.append(l_aux_i)
inner_states.append(x)
if self.layer_norm is not None:
x = self.layer_norm(x)
if not features_only:
x = self.output_layer(x)
return x, {
"inner_states": inner_states,
"l_aux": l_aux,
"attn": None,
}
def output_layer(self, features):
return self.output_projection(features)
# source: APAC-SCALE-0.1.2 / torchscale/architecture/decoder.py
import torch
import torch.nn as nn
from torchscale.architecture.encoder import Encoder
from torchscale.component.embedding import (
PositionalEmbedding,
TextEmbedding,
VisionEmbedding,
)
from torchscale.component.multiway_network import MutliwayEmbedding
class BEiT3(nn.Module):
def __init__(self, args, **kwargs):
super().__init__()
self.args = args
assert args.multiway
assert args.vocab_size > 0
assert not args.share_encoder_input_output_embed
self.text_embed = TextEmbedding(args.vocab_size, args.encoder_embed_dim)
self.vision_embed = VisionEmbedding(
args.img_size,
args.patch_size,
args.in_chans,
args.encoder_embed_dim,
contain_mask_token=True,
prepend_cls_token=True,
)
# being consistent with Fairseq, which starts from 2 for position embedding
embed_positions = MutliwayEmbedding(
modules=[
PositionalEmbedding(self.vision_embed.num_position_embeddings() + 2, args.encoder_embed_dim),
PositionalEmbedding(args.max_source_positions, args.encoder_embed_dim),
],
dim=1,
)
self.encoder = Encoder(
args,
embed_tokens=None,
embed_positions=embed_positions,
output_projection=None,
is_encoder_decoder=False,
)
def forward(
self,
textual_tokens=None,
visual_tokens=None,
text_padding_position=None,
attn_mask=None,
vision_masked_position=None,
incremental_state=None,
positions=None,
):
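# multiway_split_position marks the boundary between vision and text tokens:
# -1 means vision-only input, 0 means text-only input, and a positive value is
# the index where text tokens start after the concatenated vision tokens.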
assert textual_tokens is not None or visual_tokens is not None
if textual_tokens is None:
x = self.vision_embed(visual_tokens, vision_masked_position)
encoder_padding_mask = None
multiway_split_position = -1
elif visual_tokens is None:
x = self.text_embed(textual_tokens)
encoder_padding_mask = text_padding_position
multiway_split_position = 0
else:
x1 = self.vision_embed(visual_tokens, vision_masked_position)
multiway_split_position = x1.size(1)
x2 = self.text_embed(textual_tokens)
x = torch.cat([x1, x2], dim=1)
if text_padding_position is not None:
encoder_padding_mask = torch.cat(
[
torch.zeros(x1.shape[:-1]).to(x1.device).bool(),
text_padding_position,
],
dim=1,
)
else:
encoder_padding_mask = None
encoder_out = self.encoder(
src_tokens=None,
encoder_padding_mask=encoder_padding_mask,
attn_mask=attn_mask,
token_embeddings=x,
multiway_split_position=multiway_split_position,
incremental_state=incremental_state,
positions=positions,
)
encoder_out["multiway_split_position"] = multiway_split_position
return encoder_out
# source: APAC-SCALE-0.1.2 / torchscale/model/BEiT3.py
######
APASVO
######
*A graphical tool to perform event detection/picking in seismic traces.*
**Main Features**
* Three different picking algorithms available: STA-LTA [1]_, AMPA [2]_ and Takanami's autoregressive method [3]_.
* Typical DSP tool functionality: scrolling, zooming, panning, playback...
* Signal envelope, spectrogram and estimated characteristic function visualization.
* Manual editing of picked seismic events or picking of new ones.
* Detect mode: find all peaks of the characteristic function whose value exceeds a given threshold.
* Support for text/binary files containing seismic traces.
* Save picked events to CSV format, and characteristic function to text/binary file format.
* Two additional command-line tools: an event picking/detection tool and a synthetic earthquake generator [4]_ (see the example below).
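For instance, a quick command-line session might look like this, first rendering a synthetic record and then picking it with AMPA plus Takanami refinement (the options shown are illustrative; run each script with ``--help`` for the full list):
::
$ python apasvo-generator.py -o example.out -f 100 -l 600 -t 200
$ python apasvo-detector.py example.out -f 100 -m ampa --takanami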
.. contents:: **Table of Contents**
:local:
:backlinks: none
============
Installation
============
-------
Windows
-------
A prebuilt version of APASVO for Windows is available, compatible with 32-bit and 64-bit machines. You can download it `here`_.
Prebuilt package contains all the required software dependencies to work. Just unzip its contents into a directory of your choice and then you can start using the application.
.. _here: https://github.com/jemromerol/apasvo/releases
-----
Linux
-----
~~~~~~~~~~~~~~~~~
Prebuilt packages
~~~~~~~~~~~~~~~~~
Prebuilt distributions are the recommended installation method because they don't require installing any extra software. Just download the appropriate package for your architecture, unzip its contents into the directory of your choice and you can start using the application.
Prebuilt packages of APASVO for Linux are available for both 32-bit and 64-bit architectures. You can download them `here`_.
.. warning::
Prebuilt packages for Linux require GLIBC version 2.13 or newer to work. You can check your GLIBC version with:
::
$ ldd --version
.. _here: https://github.com/jemromerol/apasvo/releases
~~~~~~~~~~~~~~~~~~~~~~
Installation from Pypi
~~~~~~~~~~~~~~~~~~~~~~
.. warning::
Installing from PyPI is a long and delicate process that involves installing several large libraries and their dependencies, so it is discouraged unless you are confident about installing python applications with multiple dependencies from source. In any case, PREBUILT PACKAGES ARE THE RECOMMENDED WAY OF INSTALLING APASVO.
*************
Prerequisites
*************
Make sure you have Python 2.7.x installed. Then, install the latest `pip`_ distribution.
*************************************
Installation of required dependencies
*************************************
APASVO depends on a list of Python packages, which you can check in the project's `requirements.txt`_ file. These packages are automatically installed when APASVO is installed from Python repositories by using ``pip`` or from source code via `setuptools`_.
However, some of these packages, namely Matplotlib and PySide, require installation of a number of additional dependencies. If you're on a Debian / Ubuntu system, you can install these dependencies using the command:
::
$ sudo apt-get build-dep python-pyside python-matplotlib
Or if you are in Fedora/RedHat, first install ``yum-builddep`` and then use the command:
::
$ su -c "yum-builddep python-pyside python-matplotlib"
*******
Install
*******
You can install the latest version of APASVO from Python repositories by using the command:
::
$ pip install --use-wheel apasvo
~~~~~~~~~~~~~~~~~~~~~~~~
Installation from source
~~~~~~~~~~~~~~~~~~~~~~~~
First, make sure you meet the requirements explained in `Prerequisites`_ and install the needed dependencies as explained in `Installation of required dependencies`_ section.
Then, download the latest version from `GitHub`_. If you have ``git`` installed, you can use the following command:
::
$ git clone https://github.com/jemromerol/apasvo.git
Finally, enter the newly created directory containing the source code and run:
::
$ python setup.py install
.. _pip: http://pip.readthedocs.org/en/latest/installing.html
.. _requirements.txt: https://github.com/jemromerol/apasvo/blob/master/requirements.txt
.. _setuptools: https://pythonhosted.org/an_example_pypi_project/setuptools.html#using-setup-py
.. _GitHub: https://github.com/jemromerol/apasvo
----
OS X
----
Sorry, but no precompiled version for OS X is available yet. You can try to install it from Python repositories or from source by following a similar procedure to that described for `Linux`_.
===========
Screenshots
===========
* http://jemromerol.github.io/media/apasvo-screenshot-1.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-2.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-3.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-4.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-5.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-6.jpg
=======
License
=======
Licensed under the `GPLv3`_ license.
.. _GPLv3: http://www.gnu.org/licenses/gpl-3.0.html
=======
Authors
=======
José Emilio Romero López. jemromerol@gmail.com
==========
References
==========
.. [1] Trnkoczy, A. (2002). Understanding and parameter setting of STA/LTA trigger
algorithm. IASPEI New Manual of Seismological Observatory Practice, 2, 1-19.
.. [2] Álvarez, I., García, L., Mota, S., Cortés, G., Benítez, C., & De la Torre, A. (2013).
An Automatic P-Phase Picking Algorithm Based on Adaptive Multiband Processing.
Geoscience and Remote Sensing Letters, IEEE, Volume: 10, Issue: 6, pp. 1488 - 1492
.. [3] Takanami, T., & Kitagawa, G. (1988).
A new efficient procedure for the estimation of onset times of seismic waves.
Journal of Physics of the Earth, 36(6), 267-290.
.. [4] Peterson, Jon. "Observations and modeling of seismic background noise." (1993): 93-95.
=========
Changelog
=========
* 0.0.6 (2016-02-07)
* Add bandpass filtering options
* 0.0.5 (2015-11-30)
* Add a trace selector window to handle multitrace files. It also allows opening multiple
files and switching between them.
* Fix several bugs.
* 0.0.4 (2015-11-09)
* Refactor apasvo classes to use the ObsPy library. Thanks to ObsPy, the application now supports multiple input
formats (wav, sac, mseed, segy, ...) besides binary & text, multiple event export formats (NonLinLoc, QuakeML, ...)
and (virtually) multitrace files.
* Redesign apasvo-detector to detect events for multitrace files in batch.
* Fix several bugs
* 0.0.3 (2014-08-16)
* Fixed several bugs.
* 0.0.2 (2014-06-02)
* Fixed several bugs.
* Improve installation files.
* 0.0.1 (2014-05-16)
.. source: APASVO-0.0.6 / README.rst
import argparse
import sys
import os
import itertools
import multiprocessing
from apasvo._version import __version__
from apasvo.utils import parse
from apasvo.utils import collections
from apasvo.picking import stalta
from apasvo.picking import ampa
from apasvo.picking import apasvotrace as rc
INPUT_FORMAT_MAP = {
'binary': 'binary',
'text': 'text',
'autodetect': None,
'sac': 'SAC',
'mseed': 'MSEED',
}
OUTPUT_FORMAT_MAP = {
'nonlinloc': 'NLLOC_OBS',
'quakeml': 'QUAKEML',
'json': 'JSON',
}
OUTPUT_EXTENSION_SET = {
'NLLOC_OBS': '.hyp',
'QUAKEML': '.xml',
'JSON': '.json',
}
METHOD_MAP = {
'stalta': stalta.StaLta,
'ampa': ampa.Ampa,
}
DEFAULT_INPUT_FORMAT = 'autodetect'
DEFAULT_OUTPUT_FORMAT = 'nonlinloc'
DEFAULT_METHOD = 'ampa'
def print_settings(**kwargs):
"""Print settings to stdout.
Args:
args: Command-line input arguments.
"""
sys.stdout.write("\n*** General settings ***\n")
# Print multiprocessing settings
allow_multiprocessing = not kwargs.get('no_multiprocessing')
sys.stdout.write("%30s: %s\n" % ("Allow multiprocessing", allow_multiprocessing))
if allow_multiprocessing:
sys.stdout.write("%30s: %s\n" % ("N. of processes", kwargs.get('processes')))
if kwargs.get('threshold'):
sys.stdout.write("%30s: %s\n" % ("Threshold", kwargs.get('threshold')))
sys.stdout.write("%30s: %s\n" % ("Output format", kwargs.get('output_format', '').upper()))
sys.stdout.write("%30s: %s\n" % ("Peak checking(s)", kwargs.get('peak_checking')))
sys.stdout.write("%30s: %s\n" % ("Algorithm used", kwargs.get('method', '').upper()))
sys.stdout.write("%30s: %s\n" % ("Takanami", kwargs.get('takanami')))
sys.stdout.write("%30s: %s\n" % ("Takanami margin", kwargs.get('takanami_margin')))
if kwargs.get('method') == 'ampa':
sys.stdout.write("\n*** AMPA settings ***\n")
sys.stdout.write("%30s: %s\n" % ("Window length(s)", kwargs.get('window')))
sys.stdout.write("%30s: %s\n" % ("Window overlap", kwargs.get('step')))
sys.stdout.write("%30s: %s\n" % ("Noise threshold", kwargs.get('noise_thr')))
sys.stdout.write("%30s: %s\n" % ("Length of the filters used(s)", kwargs.get('L')))
sys.stdout.write("%30s: %s\n" % ("Negative response coefficient", kwargs.get('L_coef')))
sys.stdout.write("%30s: %s\n" % ("Coefficient U", kwargs.get('U')))
sys.stdout.write("\n*** AMPA filter bank settings ***\n")
sys.stdout.write("%30s: %s\n" % ("Start frequency(Hz)", kwargs.get('f_start')))
sys.stdout.write("%30s: %s\n" % ("End frequency(Hz)", kwargs.get('f_end')))
sys.stdout.write("%30s: %s\n" % ("Subband bandwidth(Hz)", kwargs.get('bandwidth')))
sys.stdout.write("%30s: %s\n" % ("Subband overlap(Hz)", kwargs.get('overlap')))
if kwargs.get('method') == 'stalta':
sys.stdout.write("\n*** STA-LTA settings ***\n")
sys.stdout.write("%30s: %s\n" % ("STA window length(s)", kwargs.get('sta_length')))
sys.stdout.write("%30s: %s\n" % ("LTA window length(s)", kwargs.get('lta_length')))
sys.stdout.write("\n")
sys.stdout.flush()
def analysis_single_file_task(filename, **kwargs):
"""Performs event picking/detection over a single input file.
:param filename: Path of the input file containing seismic data.
:param kwargs: Picking/detection settings forwarded to the selected algorithm.
"""
# Get debug level
debug = kwargs.get('verbosity', 1)
# Configure algorithm
method = METHOD_MAP.get(kwargs.get('method', DEFAULT_METHOD), ampa.Ampa)
alg = method(**kwargs)
# Open input file
if debug:
print "*** Processing file {} ***".format(filename)
input_format = INPUT_FORMAT_MAP.get(kwargs.get('input_format', DEFAULT_INPUT_FORMAT))
stream = rc.read(filename, format=input_format, **kwargs)
if debug:
print "Traces in {}".format(filename)
print stream
# Pick stream traces
for trace in stream.traces:
trace.detect(alg, debug=debug, **kwargs)
# Export picks
output_format = OUTPUT_FORMAT_MAP.get(kwargs.get('output_format', DEFAULT_OUTPUT_FORMAT))
extension = OUTPUT_EXTENSION_SET.get(output_format, '')
basename, _ = os.path.splitext(os.path.basename(filename))
output_path = kwargs.get('destination_path', os.getcwd())
stream_suffix = '_'.join([suffix for tr in stream.traces
for suffix in tr.getId().split('.')
if suffix != ''])
output_filename = "{}_{}{}".format(basename, stream_suffix, extension)
stream.export_picks(os.path.join(output_path, output_filename), format=output_format, debug=debug)
def analysis_chunk_task(parameters):
"""Performs event picking/detection over a chunk of input files.
:param parameters: A (file_list, kwargs) tuple with the files to process and
the picking/detection settings.
"""
file_list = parameters[0]
kwargs = parameters[1]
for file in file_list:
analysis_single_file_task(file, **kwargs)
def analysis(**kwargs):
"""Performs event analysis/picking over a set of seismic signals.
Performs event detection if parameter 'threshold' is not None, otherwise
performs event picking.
"""
# Get file list
file_list = kwargs.pop('FILEIN', [])
# Get debug level
debug = kwargs.get('verbosity', 1)
if debug:
print_settings(**kwargs)
if kwargs.get('no_multiprocessing', False):
analysis_chunk_task((file_list, kwargs))
else:
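# Split the input file list into chunks (roughly one per worker) so that each
# worker processes its chunk of files sequentially via analysis_chunk_task.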
processes = kwargs.get('processes', multiprocessing.cpu_count())
p = multiprocessing.Pool(processes=processes)
p.map(analysis_chunk_task, itertools.izip(collections.chunkify(file_list, len(file_list) / processes),
itertools.repeat(kwargs)))
p.close()
p.join()
def main(argv=None):
'''Command line options.'''
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
program_name = __import__('__main__').__doc__.split("\n")[0]
program_version = "v%s" % __version__
program_version_message = '%%(prog)s %s' % program_version
program_description = '''
%s %s
A tool to perform event detection/picking over seismic signals.
Analysis can be performed in two ways: supervised or unsupervised mode.
In supervised mode the function graphs each of the candidate events
found and asks the user whether to accept them or not, whereas in
unsupervised mode the function just computes results without receiving
any feedback from users.
Created by Jose Emilio Romero Lopez.
Copyright 2013. All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
''' % (program_name, program_version)
program_examples = '''
Examples of use:
\033[1m>> python apasvo-detector.py meq01.bin meq02.bin -f 100 --takanami\033[0m
Given two seismic signals, 'meq01.bin' and 'meq02.bin', sample rate 100 Hz,
performs event picking by using AMPA method (default settings) together with
Takanami method for arrival time refining.
Saves results summary to 'output.csv'.
\033[1m>> python apasvo-detector.py meq01.txt -o nonlinloc -m stalta --lta 60 --takanami\033[0m
Let 'meq01.txt' a text file containing seismic data, performs event picking
with the following settings:
Sample rate: 50 Hz. (Default value).
Picking Algorithm: STA-LTA.
STA window length: 5.0 seconds. (Default value).
LTA window length: 60.0 seconds.
Apply Takanami AR method on results.
Works in supervised mode, showing the characteristic function, envelope
function and spectrogram for each possible event.
Saves results to nonlinloc format
\033[1m>> python apasvo-detector.py meq01.bin -t 1.5 --ampa-filters 50.0 25.0 12.5 6.25 --ampa-noise-threshold 75\033[0m
Let 'meq01.bin' a binary file containing seismic data, detects seismic
events whose characteristic function value is over 1.5.
Event detection uses the following settings:
Detection Algorithm: AMPA.
Filter lengths: 50.0 25.0 12.5 6.25 (in seconds).
Noise threshold percentile: 75
Saves results summary to nonlinloc format.
Saves characteristic function to './cf_data/meq01.cf.bin'.
\033[1m>> python apasvo-detector.py meq*.bin --output-format quakeml @settings.txt\033[0m
Performs event picking on all files matching 'meq*.bin' and takes some
settings from a text file named 'settings.txt'.
Saves results to quakeml format.
The following settings are used:
Picking Algorithm: AMPA.
Sliding window length: 200.0 seconds.
Sliding window step: 100.0 seconds. (50 % overlap).
Filter lengths: 50.0, 20.0, 10.0, 6.0, 3.0 (in seconds).
Noise threshold percentile: 75
Frequency range: 4-25 Hz.
So, the following is the content of 'settings.txt':
>> cat settings.txt
-m ampa
--ampa-window 200.0
--ampa-step 100.0
--ampa-filters 50.0 20.0 10.0 6.0 3.0
--ampa-noise-threshold 75
--ampa-f-start 4.0
--ampa-f-end 25.0
'''
try:
# Setup argument parser
parser = parse.CustomArgumentParser(description=program_description,
epilog=program_examples,
formatter_class=argparse.RawDescriptionHelpFormatter,
fromfile_prefix_chars='@')
parser.add_argument('-V', '--version', action='version',
version=program_version_message)
parser.add_argument('-v', '--verbosity',
type=parse.non_negative_int,
default=1,
metavar='<arg>',
help='''
Verbosity level. A value of 0 means no output is printed. Default value is 1.
''')
parser.add_argument("FILEIN", nargs='+',
action=parse.GlobInputFilenames,
metavar='file',
help='''
Binary or text file containing a seismic-like signal.
''')
parser.add_argument("-i", "--input-format",
choices=['binary', 'text', 'sac', 'mseed', 'autodetect'],
default='autodetect',
help='''
Selected format for input files. Default value is autodetect, meaning
format will be inferred for each input file.
''')
parser.add_argument("-o", "--output-format",
choices=['nonlinloc', 'quakeml', 'json'],
default='nonlinloc',
help='''
Output file format for the picked events. Default: 'nonlinloc'.
''')
parser.add_argument("-d", "--destination-path",
metavar='<arg>',
default=os.getcwd(),
help='''
Destination path for output files. By default it will be the current working directory.
''')
parser.add_argument("--no-multiprocessing",
action='store_true',
default=False,
help='''
Do not use multiprocessing during pick estimation.
''')
parser.add_argument("-p", "--processes",
type=parse.positive_int,
default=multiprocessing.cpu_count(),
metavar='<arg>',
help='''
Number of processes to be used during pick estimation. By default it will be equal to
the number of system processors.
''')
parser.add_argument("-m", "--method",
choices=['ampa', 'stalta'],
default='ampa',
help='''
Available event detection/picking algorithms. Default: 'ampa'.
''')
parser.add_argument("-t", "--threshold",
type=parse.positive_float,
metavar='<arg>',
help='''
Local maxima in the characteristic function over this value will
be considered as possible events (detection mode).
If no threshold parameter is provided, the application takes only the
global maximum of the characteristic function (picking mode).
''')
parser.add_argument("--peak-window",
type=parse.positive_float,
default=1.0,
dest='peak_checking',
metavar='<arg>',
help='''
How many seconds on each side of a point of the characteristic
function to use for the comparison to consider the point to be
a local maximum. If no threshold is provided, this parameter has
no effect. Default value is 1 s.
''')
parser.add_argument("-f", "--frequency", type=parse.positive_int,
default=50.0,
dest='fs',
metavar='<arg>',
help='''
Sample rate in Hz (only has effect for binary and text input files). Default: 50 Hz
''')
parser.add_argument("--datatype",
choices=['int16', 'int32', 'int64', 'float16', 'float32', 'float64'],
default='float64',
dest='dtype',
help='''
Data-type of input data (only has effect for binary input files).
Default value is float64, meaning double-precision floating point format.
''')
parser.add_argument("--byteorder",
choices=['little-endian', 'big-endian', 'native'],
default='native',
help='''
If the input files are in binary format this will be the byte-order
of the selected datatype. Default choice is hardware native.
''')
# STA-LTA arguments
sta_lta_options = parser.add_argument_group("STA-LTA settings")
sta_lta_options.add_argument("--sta",
type=parse.positive_float,
dest='sta_length',
default=5.0,
metavar='<arg>',
help='''
Length of STA window (in seconds) when using STA-LTA method.
Default value is 5 seconds.
''')
sta_lta_options.add_argument("--lta",
type=parse.positive_float,
dest='lta_length',
default=100.0,
metavar='<arg>',
help='''
Length of LTA window (in seconds) when using STA-LTA method.
Default value is 100 seconds.
''')
# AMPA arguments
ampa_options = parser.add_argument_group("AMPA settings")
ampa_options.add_argument("--ampa-window",
type=parse.positive_float,
dest='window',
default=100.0,
metavar='<arg>',
help='''
Sliding window length (in seconds) when using AMPA method.
Typically this value should be close to the expected length
of the events sought.
Default: 100 seconds.
''')
ampa_options.add_argument("--ampa-step",
type=parse.positive_float,
dest='step',
default=50.0,
metavar='<arg>',
help='''
Step length in seconds when using AMPA method.
Default: 50 seconds.
''')
ampa_options.add_argument("--ampa-filters",
type=parse.positive_float,
dest='L',
default=[30.0, 20.0, 10.0, 5.0, 2.5],
nargs='+',
metavar='<arg>',
help='''
A list of filter lengths (in seconds) used by AMPA
at the enhancement filter stage.
The length of a filter is related to the duration of the detected
events. An enhancement filter tuned to long duration events can suppress
short duration events and vice versa. By combining several filters of
different lengths, the algorithm can deal with both cases.
Default values are 30.0, 20.0, 10.0, 5.0 and 2.5 seconds.
''')
ampa_options.add_argument("--ampa-response-penalty",
type=float,
dest='L_coef',
default=3.0,
metavar='<arg>',
help='''
Penalty factor that minimizes the response of the enhancement-stage
filters to emerging or impulsive noise.
Default: 3.0.
''')
ampa_options.add_argument("--ampa-noise-threshold",
type=parse.percentile,
dest='noise_thr',
default=90.0,
metavar='<arg>',
help='''
Percentile of the amplitude of the envelope that measures the noise
reduction level for each band at noise reduction stage.
Default: 90.
''')
ampa_options.add_argument("--ampa-f-start",
type=parse.positive_float,
dest='f_start',
default=2.0,
metavar='<arg>',
help='''
Start frequency of the filter bank applied at the adaptive multi-band
processing stage.
Default: 2.0 Hz.
''')
ampa_options.add_argument("--ampa-f-end",
type=parse.positive_float,
dest='f_end',
default=12.0,
metavar='<arg>',
help='''
End frequency of the filter bank applied at the adaptive multi-band
processing stage.
Default: 12.0 Hz.
''')
ampa_options.add_argument("--ampa-bandwidth",
type=parse.positive_float,
dest='bandwidth',
default=3.0,
metavar='<arg>',
help='''
Channel bandwidth of the filter bank applied at the adaptive multi-band
processing stage.
Default: 3.0 Hz.
''')
ampa_options.add_argument("--ampa-overlap",
type=parse.positive_float,
dest='overlap',
default=1.0,
metavar='<arg>',
help='''
Overlap between channels of the filter bank applied at the adaptive
multi-band processing stage.
Default: 1.0 Hz.
''')
ampa_options.add_argument("--ampa-U",
type=float,
dest='U',
default=12.0,
metavar='<arg>',
help='''
A parameter used at the end of the enhancement filter stage to avoid
logarithm of zero and to shift the characteristic function to zero.
Given y(n) the product of the outputs of the different filters used
at the end of the enhancement stage, the characteristic function is
then calculated as:
cf(n) = U + log10(y(n) + 10 ** (-U))
Default: 12.0.
''')
# Takanami arguments
takanami_options = parser.add_argument_group("Takanami settings")
takanami_options.add_argument("--takanami",
action='store_true',
default=False,
help='''
Specifies whether to use Takanami AR method to refine results or not.
''')
takanami_options.add_argument("--takanami-len",
type=parse.positive_float,
dest='takanami_margin',
default=5.0,
metavar='<arg>',
help='''
Given a possible event time point, this parameter specifies the length
of the interval centered at that point where the Takanami AR refining
method is applied. I.e., given a possible arrival time 't' and a value 'w'
for this parameter, the application applies the Takanami AR method on
[t - w, t + w].
Default: 5.0 seconds.
''')
# Parse the args and call whatever function was selected
args, _ = parser.parse_known_args()
except Exception, e:
indent = len(program_name) * " "
sys.stderr.write(program_name + ": " + repr(e) + "\n")
sys.stderr.write(indent + " for help use --help\n")
return 2
analysis(**vars(args))
if __name__ == "__main__":
sys.exit(main())
# source: APASVO-0.0.6 / bin/apasvo-detector.py
import sys
from PySide import QtGui, QtCore
from apasvo._version import _application_name
from apasvo.gui.views import mainwindow
if __name__ == '__main__':
# QtGui.QApplication.setLibraryPaths([]) # Disable looking for plugins
app = QtGui.QApplication(sys.argv)
app.setApplicationName(_application_name)
app.setWindowIcon(QtGui.QIcon(":/app.png"))
# Create and display the splash screen
splash = QtGui.QSplashScreen(QtGui.QPixmap(":splash.png"), QtCore.Qt.WindowStaysOnTopHint)
splash.show()
# Load libraries
splash.showMessage("Loading libraries...")
import matplotlib
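# Configure Matplotlib for the Qt4Agg backend with PySide bindings before any
# figure is created; the large agg.path.chunksize lets very long waveforms be
# rendered in chunks.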
matplotlib.rcParams['backend'] = 'qt4agg'
matplotlib.rcParams['backend.qt4'] = 'PySide'
matplotlib.rcParams['patch.antialiased'] = False
matplotlib.rcParams['agg.path.chunksize'] = 80000
import numpy as np
import traceback
from apasvo.gui.views.generated import qrc_icons
from apasvo.gui.delegates import cbdelegate
from apasvo.gui.models import eventlistmodel
from apasvo.gui.models import pickingtask
from apasvo.gui.views import aboutdialog
from apasvo.gui.views import svwidget
from apasvo.gui.views import navigationtoolbar
from apasvo.gui.views import loaddialog
from apasvo.gui.views import savedialog
from apasvo.gui.views import save_events_dialog
from apasvo.gui.views import settingsdialog
from apasvo.gui.views import takanamidialog
from apasvo.gui.views import staltadialog
from apasvo.gui.views import ampadialog
from apasvo.gui.views import playertoolbar
from apasvo.gui.views import error
from apasvo.picking import stalta
from apasvo.picking import ampa
from apasvo.picking import apasvotrace as rc
app.processEvents()
# Create and display the main window
main = mainwindow.MainWindow()
main.show()
splash.finish(main)
try:
app.exec_()
except Exception, e:
error.display_error_dlg(str(e), traceback.format_exc())
sys.exit(1)
sys.exit(0)
# source: APASVO-0.0.6 / bin/apasvo-gui.py
import argparse
import os
import sys
from apasvo._version import __version__
from apasvo.utils import clt, parse, futils
from apasvo.utils.formats import rawfile
from apasvo.picking import eqgenerator
def print_settings(args):
"""Prints settings to stdout.
Args:
args: Command-line input arguments.
"""
sys.stdout.write("\nGeneral settings:\n")
sys.stdout.write("%30s: %s\n" % ("Signal frequency(Hz)",
args.fs))
sys.stdout.write("%30s: %s\n" % ("Length(s)",
args.length))
sys.stdout.write("%30s: %s\n" % ("Start time(s)",
args.t_event))
sys.stdout.write("%30s: %s\n" % ("Noise power(dB)",
args.P_noise_db))
if not args.FILEIN:
sys.stdout.write("%30s: %s\n" % ("Event power(dB)",
args.gen_event_power))
sys.stdout.write("\nFilter bank settings:\n")
sys.stdout.write("%30s: %s\n" % ("Start frequency(Hz)",
args.f_low))
sys.stdout.write("%30s: %s\n" % ("End frequency(Hz)",
args.f_high))
sys.stdout.write("%30s: %s\n" % ("Subband bandwidth(Hz)",
args.bandwidth))
sys.stdout.write("%30s: %s\n" % ("Subband overlap(Hz)",
args.overlap))
sys.stdout.write("%30s: %s\n" % ("Start envelope length(s)",
args.low_period))
sys.stdout.write("%30s: %s\n" % ("End envelope length(s)",
args.high_period))
sys.stdout.write("%30s: %s\n" % ("Start amplitude",
args.low_amp))
sys.stdout.write("%30s: %s\n" % ("End amplitude",
args.high_amp))
sys.stdout.write("\n")
sys.stdout.flush()
def generate(FILEIN, length, t_event, output, gen_event_power=5.0, n_events=1,
gen_noise_coefficients=False, output_format='binary',
datatype='float64', byteorder='native', **kwargs):
"""Generates synthetic earthquake signals with background noise and saves
them to file.
The function accepts a list of command-line arguments and renders synthetic
seismic data in two ways: If a list of input files containing seismic data
is provided, the function generates a new output signal for each one of
the files by adding background noise. If no input file is provided,
the function generates a list of synthetic seismic signals.
Args:
FILEIN: A list of binary or text file objects storing seismic data.
length: Length of rendered seismic signals, in seconds.
If FILEIN is not None, this parameter has no effect.
t_event: Start time of rendered earthquake, given in seconds from the
beginning of the signal.
If FILEIN is not None, this parameter has no effect.
output: Output file name (absolute path).
If no input file is provided and n_events is greater than 1, the
name of each generated file will be followed by its ordinal number.
E.g. given FILEIN = None, output = 'example.out' and n_events = 5,
the function will generate 5 synthetic files named:
'example00.out', 'example01.out', 'example02.out', 'example03.out'
and 'example04.out'.
gen_event_power: Earthquake power in dB.
If FILEIN is not None, this parameter has no effect.
Default: 5.0.
n_events: No. of signals to generate.
If FILEIN is not None, this parameter has no effect.
Default: 1.
gen_noise_coefficients: A binary or text file object containing a list
of numeric coefficients of a FIR filter that models the background
noise.
Default value is False, meaning unfiltered white noise is used
to model the background noise.
output_format: Output file format. Possible values are 'binary' or
'text'. Default: 'binary'.
datatype: Data-type of generated data. Default value is 'float64'.
If FILEIN is not None, this parameter is also the datatype of
input data.
byteorder: Byte-order of generated data. Possible values are
'little-endian', 'big-endian' and 'native'.
If FILEIN is not None, this parameter is also the format of
input data.
Default value is 'native'.
"""
fs = kwargs.get('fs', 50.0)
# Configure generator
clt.print_msg("Configuring generator... ")
generator = eqgenerator.EarthquakeGenerator(**kwargs)
clt.print_msg("Done\n")
# Load noise coefficients
if gen_noise_coefficients:
if futils.istextfile(gen_noise_coefficients):
f = open(gen_noise_coefficients, 'r')
else:
f = open(gen_noise_coefficients, 'rb')
clt.print_msg("Loading noise coefficients from %s... " %
f.name)
generator.load_noise_coefficients(f, dtype=datatype,
byteorder=byteorder)
clt.print_msg("Done\n")
# Process input files
basename, ext = os.path.splitext(output)
filename_out = output
# If a list of input files containing seismic data
# is provided, generate a new output signal for each one of
# the files by adding background noise.
if FILEIN:
fileno = 0
for f in FILEIN:
# Read input signal
fin_handler = rawfile.get_file_handler(f, dtype=datatype,
byteorder=byteorder)
clt.print_msg("Loading seismic signal from %s... " %
fin_handler.filename)
signal = fin_handler.read()
clt.print_msg("Done\n")
# Generate output filename
if len(FILEIN) > 1:
filename_out = "%s%02.0i%s" % (basename, fileno, ext)
fileno += 1
clt.print_msg("Generating artificial signal in %s... " %
filename_out)
# Add background noise to signal
eq = generator.generate_noise(signal)
# Save outputs to file
if output_format == 'text':
fout_handler = rawfile.TextFile(filename_out, dtype=datatype,
byteorder=byteorder)
else:
fout_handler = rawfile.BinFile(filename_out, dtype=datatype,
byteorder=byteorder)
fout_handler.write(eq, header="Sample rate: %g Hz." % fs)
clt.print_msg("Done\n")
# If no input file is provided,
# generate a list of synthetic seismic signals.
else:
for i in xrange(n_events):
# Generate output filename
if n_events > 1:
filename_out = "%s%02.0i%s" % (basename, i, ext)
clt.print_msg("Generating artificial signal in %s... " %
filename_out)
# Generate a synthetic signal
eq = generator.generate_earthquake(length, t_event,
gen_event_power)
# Save outputs to file
if output_format == 'text':
fout_handler = rawfile.TextFile(filename_out, dtype=datatype,
byteorder=byteorder)
else:
fout_handler = rawfile.BinFile(filename_out, dtype=datatype,
byteorder=byteorder)
fout_handler.write(eq, header="Sample rate: %g Hz." % fs)
clt.print_msg("Done\n")
def main(argv=None):
'''Command line options.'''
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
program_name = __import__('__main__').__doc__.split("\n")[0]
program_version = "v%s" % __version__
program_version_message = '%%(prog)s %s' % program_version
program_description = '''
%s %s
A tool that generates synthetic seismic signals.
Renders synthetic seismic data in two ways: If a list of input files
containing seismic data is provided, the tool generates a new output
signal for each one of them by adding background noise. If no input
file is provided, it generates a list of synthetic seismic signals.
Artificial earthquakes are generated at desired start point from
white noise band-filtered and modulated by using different envelope
functions for each band.
Similarly, background noise is modeled from white noise and finally
added to the previously generated sequence that contains the synthetic
earthquake.
Created by Jose Emilio Romero Lopez.
Copyright 2013. All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
''' % (program_name, program_version)
program_examples = '''
Examples of use:
\033[1m>> python apasvo-generator.py -o example.out -f 100 -l 600 -t 200 -ep 5 -np 0\033[0m
Generates a synthetic earthquake of the following characteristics:
Earthquake power: 5.0 dB (SNR 5.0)
Noise power: 0.0 dB (SNR 5.0)
Sample rate: 100 Hz.
Length: 600.0 seconds (10 minutes)
Arrival time: 200.0 seconds.
Saves result to a file named 'example.out'
\033[1m>> python apasvo-generator.py meq.bin meq2.txt -f 50 -np 2 -fir coeffs.txt\033[0m
Given two seismic signals, 'meq.bin' and 'meq2.txt', sample rate 50 Hz, adds
background noise of 2.0 dB. Noise is modeled by a FIR filter whose
coefficients are stored in the file 'coeffs.txt'.
Results will be saved to 'eq00.out' and 'eq01.out'.
\033[1m>> python apasvo-generator.py -o eq.bin -n 500 -l 3600 -t 100 -ep 2 -np 2 --f-low 4.0 --f-high 25.0\033[0m
Generates a list of 500 synthetic earthquakes of the following
characteristics:
SNR: 0.0 (2.0 dB signal and noise power)
Sample rate: 50 Hz. (Default value).
Frequency range: 4-25 Hz.
Background noise: Gaussian white noise.
Length: 3600 seconds.
Arrival time: 100.0 seconds.
Results will be saved to 'eq00.bin', 'eq01.bin', 'eq02.bin', ...,
'eq499.bin'.
\033[1m>> python apasvo-generator.py -o eq.txt -n 30 --output-format text @settings.txt\033[0m
Generates a list of 30 synthetic earthquakes and takes some settings from a
text file named 'settings.txt'
Saves them to 'eq00.txt', ..., 'eq29.txt', plain text format.
Rendered signals has the following characteristics:
Earthquake power: 10.0 dB.
Noise power: 0.0 dB.
FIR filter coefficients: 'coeffs.txt'
Length: 1200 seconds.
Arrival time: 250.0 seconds.
So, the following is the content of 'settings.txt':
>> cat settings.txt
-ep 10.0
-np 0.0
-fir coeffs.txt
-l 1200
-t 250
'''
try:
# Setup argument parser
parser = parse.CustomArgumentParser(description=program_description,
epilog=program_examples,
formatter_class=argparse.RawDescriptionHelpFormatter,
fromfile_prefix_chars='@')
parser.add_argument('-V', '--version', action='version',
version=program_version_message)
parser.set_defaults(func=generate)
parser.add_argument("FILEIN", nargs='*',
action=parse.GlobInputFilenames,
metavar='file',
help='''
Binary or text file containing a seismic-like signal.
''')
parser.add_argument("-o", "--output",
default='eq.out',
metavar='<file>',
help='''
Output filename. Default: 'eq.out'.
If no. of output signals is greater than 1, basename of each
generated file will be followed by its ordinal number.
E.g. given parameters '-o example.out' and '-n 5', generates 5 files named:
'example00.out', 'example01.out', 'example02.out', 'example03.out'
and 'example04.out'.
''')
parser.add_argument("-n", "--n-events",
type=parse.positive_int,
default=1,
metavar='<arg>',
help='''
No. of signals to generate. Default: 1.
If input signal is provided, this parameter has no effect.
''')
parser.add_argument("-f", "--frequency", type=parse.positive_int,
default=50,
dest='fs',
metavar='<arg>',
help='''
Sample rate in Hz. Default: 50 Hz.
''')
parser.add_argument("-l", "--length",
type=parse.positive_int,
default=600.0,
metavar='<arg>',
help='''
Length of generated data in seconds.
If input file is provided, this parameter has no effect.
Default: 600.0 seconds.
''')
parser.add_argument("-t", "--t-event",
type=parse.positive_float,
default=50.0,
metavar='<arg>',
help='''
Arrival time in seconds from the beginning of rendered signal.
If input signal is provided, this parameter has no effect.
Default: 50.0 seconds.
''')
parser.add_argument("-ep", "--earthquake-power",
type=float,
dest='gen_event_power',
default=10.0,
metavar='<arg>',
help='''
Earthquake power in dB.
If input signal is provided, this parameter has no effect.
Default: 10.0 dB.
''')
parser.add_argument("-np", "--noise-power",
type=float,
dest='P_noise_db',
default=0.0,
metavar='<arg>',
help='''
Background noise power in dB. Default: 0.0 dB.
''')
parser.add_argument("-fir", "--noise-coefficients",
type=parse.filein,
dest='gen_noise_coefficients',
metavar='<file>',
help='''
Binary or text file containing a list of numeric coefficients of a
FIR filter that characterizes background noise. If not specified
unfiltered white noise is used to model background noise.
''')
parser.add_argument("--f-low",
type=parse.positive_float,
dest='f_low',
default=2.0,
metavar='<arg>',
help='''
Start frequency on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 2.0 Hz.
''')
parser.add_argument("--f-high",
type=parse.positive_float,
dest='f_high',
default=18.0,
metavar='<arg>',
help='''
End frequency on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 18.0 Hz.
''')
parser.add_argument("--bandwidth",
type=parse.positive_float,
dest='bandwidth',
default=4.0,
metavar='<arg>',
help='''
Channel bandwidth on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 4.0 Hz.
''')
parser.add_argument("--overlap",
type=parse.positive_float,
dest='overlap',
default=1.0,
metavar='<arg>',
help='''
Overlap between channels bank on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 1.0 Hz.
''')
parser.add_argument("--period-low",
type=parse.positive_float,
dest='low_period',
default=50.0,
metavar='<arg>',
help='''
Start value of the range of noise envelope lengths for the
different bands on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 50.0 seconds.
''')
parser.add_argument("--period-high",
type=parse.positive_float,
dest='high_period',
default=10.0,
metavar='<arg>',
help='''
End value of the range of noise envelope lengths for the
different bands on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 10.0 seconds.
''')
parser.add_argument("--amplitude-low",
type=parse.positive_float,
dest='low_amp',
default=0.2,
metavar='<arg>',
help='''
Start value of the range of noise envelope amplitudes for the
different bands on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 0.2.
''')
parser.add_argument("--amplitude-high",
type=parse.positive_float,
dest='high_amp',
default=0.1,
metavar='<arg>',
help='''
End value of the range of noise envelope amplitudes for the
different bands on multi-band earthquake synthesis.
If input signal is provided, this parameter has no effect.
Default: 0.1.
''')
parser.add_argument("--output-format",
choices=["binary", "text"],
default="binary",
help='''
Output file format. Default value is 'binary'.
''')
parser.add_argument("--datatype",
choices=['int16', 'int32', 'int64', 'float16', 'float32', 'float64'],
default='float64',
help='''
Data-type of generated data. If input files are specified, this parameter
is also the data-type of data stored on them.
Default value is float64, meaning double-precision floating point format.
''')
parser.add_argument("--byteorder",
choices=['little-endian', 'big-endian', 'native'],
default='native',
help='''
Byte-ordering for generated data. If input files are specified, this
parameter is also the byte-ordering for data stored on them.
Default value is 'native', meaning platform native byte-ordering.
''')
# Parse the args and call whatever function was selected
args, _ = parser.parse_known_args()
print_settings(args)
except Exception, e:
indent = len(program_name) * " "
sys.stderr.write(program_name + ": " + repr(e) + "\n")
sys.stderr.write(indent + " for help use --help\n")
return 2
args.func(**vars(args))
if __name__ == "__main__":
sys.exit(main())
# source: APASVO-0.0.6 / bin/apasvo-generator.py