content
stringlengths
10
4.9M
def tgffile_to_graph(tgffilename, elfilename=''):
    """Convert a TGF file to a graph via an intermediate edge-list file.

    Parameters:
        tgffilename: path to the input .tgf file (required, non-empty).
        elfilename: path for the intermediate edge-list file; defaults to
            tgffilename + '.el' when empty.

    Returns:
        The graph built by edgelistfile_to_graph, or [] when the
        intermediate file could not be produced/read (OSError).

    Raises:
        ValueError: if tgffilename is empty.
    """
    results = []
    if not tgffilename:
        raise ValueError('No tgf filename given.')
    if elfilename == '':
        elfilename = tgffilename + '.el'
    try:
        # tgffile_to_edgelist returns the (possibly adjusted) edge-list path.
        elfilename = tgffile_to_edgelist(tgffilename, elfilename)
        results = edgelistfile_to_graph(elfilename)
    except OSError as err:
        # BUG FIX: was a Python 2 `print "File not copied."` statement
        # (a syntax error under Python 3) that also discarded the actual
        # error; report the failure detail, keep the best-effort [] return.
        print("File not copied: %s" % err)
    return results
//This function will find the previous block even if it is an orphan CBlockIndex * GetPreviousBlock(const CBlock& block, int64_t numBlocksBefore) { if(numBlocksBefore <= 0) { if(mapBlockIndex.count(block.GetHash())) { return mapBlockIndex.at(block.GetHash()); } return nullptr; } int64_t count = 1; CBlockIndex * cur = nullptr; if(mapBlockIndex.count(block.hashPrevBlock)) { cur = mapBlockIndex.at(block.hashPrevBlock); } if(!cur) { return nullptr; } while(count < numBlocksBefore ) { cur = cur->pprev; if(!cur) { return nullptr; } count++; } return cur; }
/* Parses the settings from the commandline parameters. */ func LoadSettings() { Settings = SettingsData{} flag.IntVar(&(Settings.Address), "address", 0x68, "The I2C address of the interface we " + "are connecting to.") flag.IntVar(&(Settings.FlexingChannel), "flexingchannel", 1, "The channel of the analog pin " + "where the muscle sensor for the flexing muscle is connected.") flag.IntVar(&(Settings.ExtendingChannel), "extendingchannel", 2, "The channel of the analog" + " pin where the muscle sensor for the extending muscle is connected.") flag.IntVar(&(Settings.MotorIN1), "motorin1", 38, "The GPIO Port where the IN1 channel for the" + " motor driver is connected") flag.IntVar(&(Settings.MotorIN2), "motorin2", 40, "The GPIO Port where the IN2 channel for the" + " motor driver is connected") flag.IntVar(&(Settings.MotorENA), "motorena", 35, "The GPIO Port where the ENA channel for the" + " motor driver is connected") flag.Float64Var(&(Settings.Interval), "interval", 0.1, "The amount of seconds that passes " + "between two measurements") flag.IntVar(&(Settings.SpeedChannel), "speedchannel", 3, "The channel of the analog pin " + "where the potentiometer for controlling the motor speed is connected.") flag.IntVar(&(Settings.Speed), "speed", -1, "The constant speed that the motor should use. " + "If this is negative, the potentiometer is used.") flag.IntVar(&(Settings.Threshold), "threshold", 100, "If a measured value is below the " + "threshold, the muscle is treated as active.") flag.Parse() }
def handle_commands(self, words, vision):
    """Dispatch a parsed command.

    words[1] is the command keyword (case-insensitive); later entries are
    arguments. Returns None without action when no keyword is present.
    """
    if len(words) < 2:
        return None
    command = words[1].lower()
    # BUG FIX: the play/pause/resume branches were independent `if`s, so a
    # matched command still fell through into the `elif` chain below; the
    # spotify-volume branch also read words[2] without a length guard and
    # raised IndexError on two-word input. The chain is now uniform and the
    # volume branch is guarded.
    if command == "play" and len(words) > 2:
        self.command_handler.play_spotify(words, vision)
    elif command == "pause" and len(words) > 2:
        self.command_handler.pause_song(words, vision)
    elif command == "resume" and len(words) > 2:
        self.command_handler.resume_song(words, vision)
    elif command == "next" and len(words) > 2:
        self.command_handler.next_song(words, vision)
    elif command == "previous" and len(words) > 2:
        self.command_handler.previous_song(words, vision)
    elif (len(words) > 2 and StringUtil.ccs(command, "spotify")
            and StringUtil.ccs(words[2], "volume")):
        self.command_handler.change_spotify_volume(words, vision)
    elif command == "list" and len(words) > 2:
        self.command_handler.list_commands(words, vision)
    elif command == "close" and len(words) == 2:
        # Bare "close" shuts the assistant down entirely.
        vision.stop()
    elif command == "open" and len(words) > 2:
        self.command_handler.open_program(words)
    elif command == "close" and len(words) == 3:
        # "close <program>" closes a single program instead.
        self.command_handler.close_program(words)
    elif command == "text" and len(words) > 2:
        self.command_handler.text_mode(words, vision)
    elif command == "login" and len(words) > 2:
        self.command_handler.program_login(words, vision)
    elif command == "debug" and len(words) > 2:
        self.command_handler.change_debug(words, vision)
    elif command == "profanity" and len(words) > 2:
        self.command_handler.change_profanity(words, vision)
    elif command == "search" and len(words) > 2:
        self.command_handler.search(words, vision)
import { assert } from "chai"
import { ECS } from "../src/ECS";
import { TestPositionComponent, TestVelocityComponent } from "./TestComponents";
import { TestPositionSystem, TestVelocitySystem } from "./TestSystems";

// Integration tests for the ECS facade: entity creation (with and without
// component constructor arguments) and view maintenance on component removal.
describe('ECS', function () {
    // Shared world with both test systems registered; entities accumulate
    // across cases, but each test only asserts on the entity it creates.
    const ecs = new ECS();
    const positionSystem = new TestPositionSystem();
    const velocitySystem = new TestVelocitySystem();
    ecs.registerSystem(positionSystem);
    ecs.registerSystem(velocitySystem);

    it("creates entity filled with components if created by createEntity", () => {
        const entity = ecs.createEntity([
            {component: TestPositionComponent},
            {component: TestVelocityComponent}]);

        // Expected values presumably come from the test components'
        // constructor defaults — confirm against TestComponents.
        assert.deepEqual(entity.x, 10);
        assert.deepEqual(entity.y, 20);
        assert.deepEqual(entity.dx, 10);
        assert.deepEqual(entity.dy, 20);
    });

    it("creates entity filled with components if created by createEntity using arguments", () => {
        let entity = ecs.createEntity([
            {component: TestPositionComponent},
            {component: TestVelocityComponent, args: [2]}]);

        assert.deepEqual(entity.x, 10);
        assert.deepEqual(entity.y, 20);
        // args: [2] doubles the velocity defaults (10, 20) -> (20, 40).
        assert.deepEqual(entity.dx, 20);
        assert.deepEqual(entity.dy, 40);
    });

    // it("creates entity filled with components if created from archetype", () => {
    //     const archetype = ecs.createArchetype([
    //         {component: TestPositionComponent},
    //         {component: TestVelocityComponent}]);
    //     const entity = ecs.createEntity(archetype);
    //
    //     assert.deepEqual(entity.x, 10);
    //     assert.deepEqual(entity.y, 20);
    //     assert.deepEqual(entity.dx, 10);
    //     assert.deepEqual(entity.dy, 20);
    // });

    it("removes entity from views after component removal", () => {
        const entity = ecs.createEntity([
            {component: TestPositionComponent},
            {component: TestVelocityComponent}]);
        // Entity must appear in the position system's view while it still
        // has a TestPositionComponent...
        assert.includeMembers(positionSystem.view.entities, [entity]);

        // ...and disappear from that view once the component is removed.
        ecs.removeComponentsFromEntity(entity, TestPositionComponent);
        assert.notInclude(positionSystem.view.entities, [entity]);
    });
});
package main import ( "fmt" "net/http" "os" "github.com/go-zoo/claw" mw "github.com/go-zoo/claw/middleware" ) func main() { mux := http.NewServeMux() logger := mw.NewLogger(os.Stdout, "[Example]", 2) c := claw.New(logger) stk := claw.NewStack(Middle1, Middle2) mux.HandleFunc("/home", Home) http.ListenAndServe(":8080", c.Merge(mux).Stack(stk)) } func Home(rw http.ResponseWriter, req *http.Request) { rw.Write([]byte("Home Handler\n")) } func Middle1(rw http.ResponseWriter, req *http.Request) { fmt.Printf("FROM MIDDLEWARE 1\n") } func Middle2(rw http.ResponseWriter, req *http.Request) { fmt.Printf("FROM MIDDLEWARE 2\n") } func Useless(rw http.ResponseWriter, req *http.Request) { fmt.Printf("I'M A COMPLETLY USELESS MIDDLEWARE\n") }
import type { PropsWithChildren } from 'react';

import type { Values } from '../../utils/types';
import type { MenuProps } from '../menu';
import { Size } from './types';

export type MenuItemSizeType = Values<typeof Size>;

/** Declarative description of one entry in a menu item's (sub)list. */
export type MenuItemListType = {
  key: string;
  content: string | React.ReactNode;
  prefix?: string | React.ReactNode | undefined;
  /** Nested entries rendered as a sub-list. */
  subList?: MenuItemListType[];
  suffix?: string | React.ReactNode | undefined;
  // BUG FIX: `func` is not a TypeScript type (it is a prop-types name) and
  // does not compile. Assumed a zero-argument click handler — TODO confirm
  // whether the handler should receive the click event.
  onClick?: (() => void) | undefined;
  disabled?: boolean;
  active?: boolean;
  subMenuProps?: MenuProps;
  className?: string;
};

/** Props for the icon slot of a menu item. */
export interface MenuItemIconProps extends PropsWithChildren, React.HTMLAttributes<HTMLDivElement> {
  size?: MenuItemSizeType;
}

export type MenuItemActionsProps = MenuItemIconProps;

/** Props for the menu item component itself. */
export interface MenuItemProps extends PropsWithChildren, React.HTMLAttributes<HTMLDivElement> {
  size?: MenuItemSizeType;
  icon?: React.ReactNode;
  active?: boolean;
  value?: string | number | null;
  list?: MenuItemListType[] | null | undefined;
  depth?: number | undefined;
}
# Minimum adjacent swaps to move the leftmost maximum to the front of the
# line and the rightmost minimum to the back; one swap is saved when the
# two paths cross.
count = int(input())
heights = [int(token) for token in input().split()]

# Swaps to bring the first (leftmost) maximum to position 0.
to_front = heights.index(max(heights))

# Swaps to bring the last (rightmost) minimum to the end, measured from
# the right via the reversed list.
reversed_heights = heights[::-1]
to_back = reversed_heights.index(min(reversed_heights))

total = to_front + to_back
if to_front < count - to_back:
    # Paths do not cross: both moves are independent.
    print(total)
elif total <= 0:
    print("0")
else:
    # Paths cross: the crossing swap counts once for both moves.
    print(total - 1)
import { ContainerConfiguration, Scope } from 'typescript-ioc';

import { HelloWorldApi } from './hello-world.api';
import { HelloWorldService } from './hello-world.service';

// IoC wiring: resolve HelloWorldApi to a single shared HelloWorldService
// instance (singleton scope) wherever it is injected.
const config: ContainerConfiguration[] = [
  {
    bind: HelloWorldApi,
    to: HelloWorldService,
    scope: Scope.Singleton,
  },
];

export default config;
from .unc_data_loader import unc_data_loader
import numpy as np


def unc_data_loader_2_groups(args):
    """Collapse the four UNC diagnostic groups into two.

    Late-MCI subjects are pooled with the AD group and early-MCI subjects
    with the control (CN) group.

    Parameters:
        args: forwarded unchanged to unc_data_loader.

    Returns:
        dict mapping 0 -> pooled AD+LMCI array and 1 -> pooled CN+EMCI
        array (rows concatenated along axis 0).
    """
    # BUG FIX: a stray "<gh_stars>0" scrape artifact preceded the imports
    # and made this module unparseable; removed.
    group_data_ret = unc_data_loader(args)

    # Index layout implied by the original variable names:
    # 0 = AD, 1 = CN, 2 = EMCI, 3 = LMCI -- TODO confirm against the loader.
    group_AD = group_data_ret[0]
    group_LMCI = group_data_ret[3]
    group_CN = group_data_ret[1]
    group_EMCI = group_data_ret[2]

    group_AD_update = np.append(group_AD, group_LMCI, axis=0)
    group_CN_update = np.append(group_CN, group_EMCI, axis=0)

    return {0: group_AD_update, 1: group_CN_update}
/**
 * Marshall the given parameter object, and output to a SdkJsonGenerator.
 * Each Disk field is written only when non-null, producing a sparse JSON
 * object; a null Disk is rejected as a caller error.
 */
public void marshall(Disk disk, StructuredJsonGenerator jsonGenerator) {

    if (disk == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        jsonGenerator.writeStartObject();

        // One field-name/value pair per populated Disk attribute.
        if (disk.getDiskId() != null) {
            jsonGenerator.writeFieldName("DiskId").writeValue(disk.getDiskId());
        }
        if (disk.getDiskPath() != null) {
            jsonGenerator.writeFieldName("DiskPath").writeValue(disk.getDiskPath());
        }
        if (disk.getDiskNode() != null) {
            jsonGenerator.writeFieldName("DiskNode").writeValue(disk.getDiskNode());
        }
        if (disk.getDiskStatus() != null) {
            jsonGenerator.writeFieldName("DiskStatus").writeValue(disk.getDiskStatus());
        }
        if (disk.getDiskSizeInBytes() != null) {
            jsonGenerator.writeFieldName("DiskSizeInBytes").writeValue(disk.getDiskSizeInBytes());
        }
        if (disk.getDiskAllocationType() != null) {
            jsonGenerator.writeFieldName("DiskAllocationType").writeValue(disk.getDiskAllocationType());
        }
        if (disk.getDiskAllocationResource() != null) {
            jsonGenerator.writeFieldName("DiskAllocationResource").writeValue(disk.getDiskAllocationResource());
        }

        jsonGenerator.writeEndObject();
    } catch (Throwable t) {
        // Generated-code convention: wrap anything thrown in an SDK exception.
        throw new SdkClientException("Unable to marshall request to JSON: " + t.getMessage(), t);
    }
}
def timeout(
    timeout: float,
    func: Callable[..., Any],
    args: Tuple[Any, ...] = (),
    kwargs: Optional[Dict[str, Any]] = None,
) -> Any:
    """Run ``func(*args, **kwargs)`` in a worker thread with a time limit.

    Parameters:
        timeout: maximum seconds to wait for func to finish.
        func: callable to execute.
        args: positional arguments for func.
        kwargs: keyword arguments for func (None means no kwargs).

    Returns:
        Whatever func returns.

    Raises:
        TimeoutError: if func does not finish within ``timeout`` seconds
            (the abandoned worker keeps running as a daemon thread).
        Exception: any exception func raised, re-raised with its original
            traceback.
    """
    # BUG FIX: ``kwargs={}`` was a shared mutable default argument; use the
    # None sentinel so one call's mutations can never leak into the next.
    if kwargs is None:
        kwargs = {}

    class FuncThread(threading.Thread):
        def __init__(self, bucket: queue.Queue) -> None:
            threading.Thread.__init__(self)
            self.result: Any = None
            self.bucket: queue.Queue = bucket
            self.err: Optional[Exception] = None

        def run(self) -> None:
            try:
                self.result = func(*args, **kwargs)
            except Exception as err:
                # Stash full exc_info so the caller can re-raise with the
                # worker's traceback attached.
                self.bucket.put(sys.exc_info())
                self.err = err

    bucket: queue.Queue = queue.Queue()
    it = FuncThread(bucket)
    # Daemonize so a timed-out (abandoned) worker cannot keep the process
    # alive at interpreter exit.
    it.daemon = True
    it.start()
    it.join(timeout)
    if it.is_alive():
        raise TimeoutError()
    try:
        _, _, exc_trace = bucket.get(block=False)
    except queue.Empty:
        # No exception was recorded: the call completed normally.
        return it.result
    else:
        raise it.err.with_traceback(exc_trace)
/* Hook to validate the current #pragma GCC target and set the state, and
   update the macros based on what was changed.  If ARGS is NULL, then
   POP_TARGET is used to reset the options.  */

static bool
s390_pragma_target_parse (tree args, tree pop_target)
{
  /* Snapshot the options in effect before the pragma so the macro set can
     be diffed against the new state below.  */
  tree prev_tree = build_target_option_node (&global_options);
  tree cur_tree;

  if (! args)
    cur_tree = pop_target;
  else
    {
      cur_tree = s390_valid_target_attribute_tree (args, &global_options,
						   &global_options_set, true);

      if (!cur_tree || cur_tree == error_mark_node)
	{
	  /* Invalid pragma: roll global_options back to the previous state
	     and report failure to the caller.  */
	  cl_target_option_restore (&global_options,
				    TREE_TARGET_OPTION (prev_tree));
	  return false;
	}
    }

  target_option_current_node = cur_tree;
  s390_activate_target_options (target_option_current_node);

  {
    struct cl_target_option *prev_opt;
    struct cl_target_option *cur_opt;

    prev_opt = TREE_TARGET_OPTION (prev_tree);
    cur_opt = TREE_TARGET_OPTION (cur_tree);

    /* Redefine the target macros for the new option set.  -Wunused-macros
       is suppressed while the builtins are (re)defined so the redefinitions
       themselves do not warn.  */
    cpp_options *cpp_opts = cpp_get_options (parse_in);
    unsigned char saved_warn_unused_macros = cpp_opts->warn_unused_macros;
    cpp_opts->warn_unused_macros = 0;

    s390_cpu_cpp_builtins_internal (parse_in, cur_opt, prev_opt);

    cpp_opts->warn_unused_macros = saved_warn_unused_macros;
  }

  return true;
}
def filter_by_logged_object(self):
    """Build a one-entry lookup filter: '<ClassName>_uuid' -> this object's uuid hex."""
    key = "{}_uuid".format(type(self).__name__)
    return {key: self.uuid.hex}
// // Created by k.leyfer on 11.09.2017. // #ifndef TOUCHLOGGER_DIRTY_RAWPOINTERDATA_H #define TOUCHLOGGER_DIRTY_RAWPOINTERDATA_H #include <stdint.h> #include "BitSet.h" #include "../common.h" /* Raw data for a collection of pointers including a pointer id mapping table. */ struct RawPointerData { struct Pointer { uint32_t id; int32_t x; int32_t y; int32_t pressure; int32_t touchMajor; int32_t touchMinor; int32_t toolMajor; int32_t toolMinor; int32_t orientation; int32_t distance; int32_t tiltX; int32_t tiltY; }; uint32_t pointerCount; Pointer pointers[MAX_POINTERS]; BitSet32 touchingIdBits; uint32_t idToIndex[MAX_POINTER_ID + 1]; RawPointerData(); void clear(); void copyFrom(const RawPointerData &other); void getCentroidOfTouchingPointers(float* outX, float* outY) const; inline void markIdBit(uint32_t id) { touchingIdBits.markBit(id); } inline void clearIdBits() { touchingIdBits.clear(); } inline const Pointer &pointerForId(uint32_t id) const { return pointers[idToIndex[id]]; } }; #endif //TOUCHLOGGER_DIRTY_RAWPOINTERDATA_H
/**
 * A ViewHolder containing views for an alarm item in collapsed state.
 */
public final class CollapsedAlarmViewHolder extends AlarmTimeViewHolder {

    public final TextView alarmLabel;            // optional user-set label
    public final TextView daysOfWeek;            // repeat-days summary
    public final TextView upcomingInstanceLabel; // "today"/"tomorrow" for one-shot alarms
    public final View hairLine;                  // divider, hidden when dismiss button shows

    public CollapsedAlarmViewHolder(View itemView,
            final AlarmTimeClickHandler alarmTimeClickHandler,
            final AlarmTimeAdapter alarmTimeAdapter) {
        super(itemView, alarmTimeClickHandler);
        alarmLabel = (TextView) itemView.findViewById(R.id.label);
        daysOfWeek = (TextView) itemView.findViewById(R.id.days_of_week);
        upcomingInstanceLabel = (TextView) itemView.findViewById(R.id.upcoming_instance_label);
        hairLine = itemView.findViewById(R.id.hairline);

        // Expand handler: tapping the row, the label or the arrow expands
        // this item in the adapter.
        itemView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                alarmTimeAdapter.expand(getAdapterPosition());
            }
        });
        alarmLabel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                alarmTimeAdapter.expand(getAdapterPosition());
            }
        });
        arrow.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                alarmTimeAdapter.expand(getAdapterPosition());
            }
        });
        // Edit time handler: tapping the clock opens the time editor and
        // also expands the item.
        clock.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                alarmTimeClickHandler.onClockClicked(mAlarm);
                alarmTimeAdapter.expand(getAdapterPosition());
            }
        });
    }

    /** Populate all collapsed-row views from the given alarm/instance. */
    @Override
    public void bindAlarm(Context context, Alarm alarm, AlarmInstance alarmInstance) {
        setData(alarm, alarmInstance);
        bindOnOffSwitch(context, alarm);
        bindClock(context, alarm);
        bindRepeatText(context, alarm);
        bindReadOnlyLabel(context, alarm);
        bindUpcomingInstance(context, alarm);
        // The hairline divider is redundant when the preemptive-dismiss
        // button is shown.
        boolean boundPreemptiveDismiss =
                bindPreemptiveDismissButton(context, alarm, alarmInstance);
        hairLine.setVisibility(boundPreemptiveDismiss ? View.GONE : View.VISIBLE);
        itemView.setContentDescription(clock.getContentDescription() + " "
                + alarm.getLabelOrDefault(context));
    }

    /** Show the label text when non-empty, otherwise hide the label view. */
    private void bindReadOnlyLabel(Context context, Alarm alarm) {
        if (alarm.label != null && alarm.label.length() != 0) {
            alarmLabel.setText(alarm.label);
            alarmLabel.setVisibility(View.VISIBLE);
            alarmLabel.setContentDescription(context.getString(R.string.label_description)
                    + " " + alarm.label);
        } else {
            alarmLabel.setVisibility(View.GONE);
        }
    }

    /** Show the repeat-days summary when the alarm repeats, else hide it. */
    private void bindRepeatText(Context context, Alarm alarm) {
        final String daysOfWeekText =
                alarm.daysOfWeek.toString(context, Utils.getFirstDayOfWeek(context));
        if (!TextUtils.isEmpty(daysOfWeekText)) {
            daysOfWeek.setText(daysOfWeekText);
            daysOfWeek.setContentDescription(alarm.daysOfWeek.toAccessibilityString(
                    context, Utils.getFirstDayOfWeek(context)));
            daysOfWeek.setVisibility(View.VISIBLE);
        } else {
            daysOfWeek.setVisibility(View.GONE);
        }
    }

    /** For one-shot alarms show "today"/"tomorrow"; repeating alarms hide it. */
    private void bindUpcomingInstance(Context context, Alarm alarm) {
        if (alarm.daysOfWeek.isRepeating()) {
            upcomingInstanceLabel.setVisibility(View.GONE);
        } else {
            upcomingInstanceLabel.setVisibility(View.VISIBLE);
            final String labelText = Alarm.isTomorrow(alarm, Calendar.getInstance()) ?
                    context.getString(R.string.alarm_tomorrow) :
                    context.getString(R.string.alarm_today);
            upcomingInstanceLabel.setText(labelText);
        }
    }
}
A Mount Allison University professor says there is still a lot of research to be done on the health benefits of marijuana compared to its adverse effects. Karen Crosby is an assistant professor of biology whose research focuses on how the brain regulates appetite within the hypothalamus. Crosby led a discussion about cannabinoids — chemical compounds found in the cannabis plant — at an information session at the university on Thursday. She says some cannabinoids have mind-altering effects, and some don't. Crosby says there is strong evidence cannabinoids can help with chronic pain, nausea associated with chemotherapy treatment and spasms associated with multiple sclerosis. "The adverse effects tend to be seen with really prolonged use and high use," she said. "I think the best example of that is with the younger population, the adolescent and going into the early 20s, where we see prolonged cannabinoid or cannabis use can increase the risk of schizophrenia, major depressive disorder, different disorders like that." Marijuana will be legalized in Canada on July 1, 2018. Claire West (left) and Emily Poole (right) are both biology students at Mount Allison University. They both think people need more information about marijuana and its effects. (Kate Letterick/CBC News ) Third-year biology student Emily Poole, who attended the information session, says it's a relevant topic. She feels a lot of young people don't understand the effects of marijuana and won't take the time to consider the risks once it's legalized. "I think one of the big things is definitely the development factors and that's a component that so many people don't know about," she said. "I would say that's the biggest thing … yes, there are therapeutic effects but you have to know when it's going to be therapeutic and when it's going to be detrimental." Fourth-year biology student Claire West agreed that people need more information. 
"There are people who don't have a clue and I think that's the biggest issue—you can choose to do whatever you want to your body but I think it's important to know what you're doing," she said. "If you do know and you want to go for it anyways, that's totally your choice but I think it's important to know what you're doing." More research needed Crosby says more research is needed to figure out how to target cannabinoids and their positive effects more reliably. "Tons, tons. Because the receptors to which cannabinoids bind … they're just everywhere, it's hard to come up with drugs that have benefits in clinical trials in humans," she said. "We need to be able to target certain populations of these receptors maybe in one brain region alone or in the gut alone and I think then we would begin to see more beneficial effects or at least clinical trials making their way through and more drugs hitting the market." Crosby hopes additional money and resources will be directed to more studies. "I think just even the anticipation of legalization has increased the amount of research in this area," Crosby said.
// processTemplate handles injecting data into specified template file.
// It parses ../acceptance/data/golden/<name>, renders it with data, and
// returns the output with surrounding whitespace trimmed.
func processTemplate(t *testing.T, name string, data interface{}) (string, error) {
	t.Helper()

	goldenPath := path.Join("..", "acceptance", "data", "golden", name)
	parsed, parseErr := template.New(name).ParseFiles(goldenPath)
	if parseErr != nil {
		// Mark the test failed immediately; the error is also returned so
		// the caller can report context.
		t.Fail()
		return "", fmt.Errorf("error: unable to parse template file '%s': %v", name, parseErr)
	}

	var rendered bytes.Buffer
	if execErr := parsed.Execute(&rendered, data); execErr != nil {
		return "", fmt.Errorf("unable to execute template: %v", execErr)
	}
	return strings.TrimSpace(rendered.String()), nil
}
from paradocx.util import w
from paradocx.package import WordPackage


class Document(WordPackage):
    """A Word package with convenience helpers for appending content."""

    def paragraph(self, text=None, style=None):
        """Append a paragraph to the document body and return its element."""
        p = paragraph(text, style=style)
        self.start_part.append(p)
        return p

    def table(self, data=None, style=None):
        """Append a table built from `data` (iterable of rows) and return it."""
        tbl = table(data, style=style)
        self.start_part.append(tbl)
        return tbl

    @property
    def paragraphs(self):
        # All <w:p> elements currently in the document body.
        return self.start_part.body.findall(w['p'])


def run(text=None, bold=False, italic=False, font=None):
    """Build a <w:r> run element with optional bold/italic/font properties.

    BUG FIX: a stray "<filename>..." scrape artifact preceded this module's
    imports and made the file unparseable; removed.
    """
    rPr = w.rPr()
    if bold:
        rPr.append(w.b())
    if italic:
        rPr.append(w.i())
    if font:
        # NOTE(review): OOXML names this element rFonts; confirm w.rFont()
        # is the intended helper before changing it.
        rFont = w.rFont()
        rFont.attrib[w['ascii']] = font
        rPr.append(rFont)
    r = w.r()
    # Only attach run properties when at least one was requested.
    if len(rPr):
        r.append(rPr)
    if text:
        r.append(w.t(str(text)))
    return r


def paragraph(text=None, style=None, pagebreak=None):
    """Build a <w:p> element from a string or a pre-built run element.

    `text` may be a str (wrapped in a run) or any element with a `.tag`
    attribute (appended as-is); other values are ignored.
    """
    p = w.p()
    subs = []
    pPr = w.pPr()
    if style:
        s = w.pStyle()
        s.attrib[w['val']] = style
        pPr.append(s)
    if pagebreak:
        pPr.append(w.sectPr())
    if len(pPr):
        subs.append(pPr)
    if text:
        if isinstance(text, str):
            # (A redundant `text = str(text)` identity conversion was
            # removed here; behavior is unchanged.)
            subs.append(w.r(w.t(text)))
        elif hasattr(text, 'tag'):
            subs.append(text)
    p.extend(subs)
    return p


def table(data=None, style=None):
    """Build a <w:tbl> element; each row of `data` becomes a table row of
    single-paragraph cells."""
    tbl = w.tbl()
    tblPr = w.tblPr()
    tbl.append(tblPr)
    data = data or []
    for cells in data:
        tbl.append(w.tr(*[w.tc(paragraph(value)) for value in cells]))
    if style:
        s = w.tblStyle()
        s.attrib[w['val']] = style
        tblPr.append(s)
    return tbl
# Python 2 script: reads n and a list v; prints "NO" unless at least three
# distinct sorted index-orderings of v exist, otherwise prints "YES"
# followed by three different 1-based index permutations that each sort v.
n = input()
v = list(map(int, raw_input().split()))

# Product of each distinct value's multiplicity = number of index
# permutations that leave the sorted value sequence unchanged.
configs = 1
for x in set(v):
    configs *= v.count(x)

if configs < 3:
    print "NO"
    exit()
print "YES"

# Pair each value with its original index, then sort by value; ties keep
# index order, so adjacent equal values are swappable.
v = sorted([(v[i], i) for i in range(0, len(v))])

def display():
    # Emit the original (1-based) indices in the current order.
    print ' '.join([str(x[1]+1) for x in v])

display()

# Positions where adjacent sorted entries share a value; swapping there
# yields another valid ordering. configs >= 3 implies either one value with
# multiplicity >= 3 or two values with multiplicity >= 2, so at least two
# such positions exist.
equals = []
for i in range(0, len(v)-1):
    if v[i][0] == v[i+1][0]:
        equals.append(i)

x = equals[0]
v[x], v[x+1] = v[x+1], v[x]
display()

x = equals[1]
v[x], v[x+1] = v[x+1], v[x]
display()
import math

# Count the elements of A that are unique in A and not divisible by any
# other (smaller) element of A, via a sieve over multiples.
N = int(input())
values = sorted(map(int, input().split()))
limit = values[-1]

# is_candidate[v] stays True while v is neither a duplicate nor a multiple
# of a smaller surviving value.
is_candidate = [True] * (limit + 1)

for idx, v in enumerate(values):
    if not is_candidate[v]:
        # Already ruled out by a smaller divisor (or an earlier duplicate).
        continue
    if idx != 0 and values[idx - 1] == v:
        # Duplicate value: neither copy may be counted.
        is_candidate[v] = False
    else:
        # Rule out every proper multiple of v.
        for multiple in range(2 * v, limit + 1, v):
            is_candidate[multiple] = False

print(sum(1 for v in values if is_candidate[v]))
/**
 * Verifies collection generic types or array types are introspected as
 * properties according to specification rules.
 */
@Test
public void testPropertyCollectionType() throws Exception {
    JavaImplementation type = javaImplementationFactory.createJavaImplementation();
    // Use the no-arg constructor so every member must come from introspection
    // rather than constructor parameters.
    Constructor<PropertyCollectionImpl> ctor = PropertyCollectionImpl.class.getConstructor();
    type.setConstructor(new JavaConstructorImpl<PropertyCollectionImpl>(ctor));
    processor.visitEnd(PropertyCollectionImpl.class, type);
    // The collection/array members must all be classified as properties
    // (4 expected) and none as references.
    assertEquals(0, type.getReferences().size());
    assertEquals(4, type.getProperties().size());
}
// StartTorrentSyncing is an endless loop that uses torrents to sync missing blocks
// It will grab any block higher than the highest dblock saved in the database up
// to the highest known block.
func (s *State) StartTorrentSyncing() error {
	if !s.UsingTorrent() {
		return fmt.Errorf("State is not using torrents, yet torrent sync was called")
	}

	// Wait until the database has finished loading before syncing anything.
	for !s.DBFinished {
		time.Sleep(1 * time.Second)
	}

	// Next height this node would upload when acting as leader/uploader.
	var done uint32 = 1
	for {
		if s.IsLeader() || s.TorrentUploader() {
			// Upload role: push completed DBStates up to EntryDBHeightComplete,
			// then idle (longer when fully caught up to the network).
			if done < s.EntryDBHeightComplete {
				for done < s.EntryDBHeightComplete {
					s.UploadDBState(done)
					done++
				}
			} else {
				if s.EntryDBHeightComplete > 0 && s.GetHighestKnownBlock() == s.EntryDBHeightComplete {
					time.Sleep(30 * time.Second)
				}
				time.Sleep(5 * time.Second)
			}
			continue
		}

		// Download role. rightDuration is the per-iteration pacing sleep.
		rightDuration := time.Duration(time.Second * 1)
		dblock, err := s.DB.FetchDBlockHead()
		if err != nil || dblock == nil {
			if err != nil {
				log.Printf("[TorrentSync] Error while retrieving dblock head, %s", err.Error())
			}
			time.Sleep(5 * time.Second)
			continue
		}

		// allowed throttles how many heights we request this pass, tuned by
		// queue pressure and how far ahead completed torrents have run.
		allowed := 3000
		lower := s.GetHighestSavedBlk()
		upper := s.GetHighestKnownBlock()
		if upper-(BATCH_SIZE*2) < lower {
			// Close to the head: restart from the completed-entry height.
			lower = s.EntryDBHeightComplete + 1
			allowed = 1750
		}
		if s.InMsgQueue().Length() > 3000 || s.HighestCompletedTorrent > lower+3500 {
			if s.HighestCompletedTorrent > lower+500 {
				allowed = 1750
			} else {
				allowed = 2500
			}
		}
		if upper == 0 {
			// No known network height yet.
			time.Sleep(5 * time.Second)
			continue
		}
		if lower == upper || upper-BATCH_SIZE < lower {
			// Caught up (or within one batch of the head); torrents only
			// help for heights at least BATCH_SIZE behind.
			time.Sleep(20 * time.Second)
			continue
		}

		// Export sync window for metrics.
		stateTorrentSyncingLower.Set(float64(lower))
		stateTorrentSyncingUpper.Set(float64(upper))

		max := lower + uint32(allowed)
		if upper < max {
			// Requesting all the way to the head; pace slower afterwards.
			rightDuration = time.Duration(5 * time.Second)
			max = upper
		}

		// Request each height in [lower, max) from the torrent plugin.
		var u uint32 = 0
	RequestLoop:
		for u = lower; u < max; u++ {
			if (upper - BATCH_SIZE) < u {
				// Within one batch of the head: stop requesting via torrent.
				break RequestLoop
			}
			err := s.DBStateManager.RetrieveDBStateByHeight(u)
			if err != nil {
				if s.DBStateManager.Alive() == nil {
					log.Printf("[TorrentSync] Error while retrieving height %d by torrent, %s", u, err.Error())
				} else {
					log.Println("Torrent plugin has stopped in TorrentSync")
					time.Sleep(10 * time.Second)
				}
			}
		}

		// NOTE(review): this guard reads EntryBlockDBHeightComplete but the
		// request uses EntryDBHeightComplete — confirm the mismatch is
		// intentional.
		if lower > s.EntryBlockDBHeightComplete {
			s.DBStateManager.RetrieveDBStateByHeight(s.EntryDBHeightComplete + 1)
		}
		// Tell the plugin which heights are fully done so it can prune.
		s.DBStateManager.CompletedHeightTo(s.EntryDBHeightComplete)
		time.Sleep(rightDuration)
	}
}
#include "version.h"
#include "config.h"
#include "cpuid.h"
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/time.h>

__ID("@(#) $Id: cpuid.cc 2433 2012-01-10 22:01:30Z lyonel $");

#if defined(__i386__) || defined(__alpha__)

// Locate the n-th cache node under 'node', or NULL when absent.
static hwNode *getcache(hwNode & node, int n = 0)
{
  char cachename[10];
  hwNode *cache = NULL;

  if (n < 0)
    n = 0;

  snprintf(cachename, sizeof(cachename), "cache:%d", n);
  cache = node.getChild(string(cachename));

  if (cache)
    return cache;

  // "cache:0" is equivalent to "cache" if we only have L1 cache
  if ((n == 0) && (node.countChildren(hw::memory) <= 1))
    cache = node.getChild(string("cache"));

  if (cache)
    return cache;
  else
    return NULL;
}

// Locate (or, for n == 0, create under "core") the n-th CPU node.
static hwNode *getcpu(hwNode & node, int n = 0)
{
  char cpubusinfo[10];
  hwNode *cpu = NULL;

  if (n < 0)
    n = 0;

  snprintf(cpubusinfo, sizeof(cpubusinfo), "cpu@%d", n);
  cpu = node.findChildByBusInfo(cpubusinfo);

  if (cpu)
    return cpu;

  // Only the first CPU is auto-created; higher indices must already exist.
  if (n > 0)
    return NULL;

  hwNode *core = node.getChild("core");

  if (core)
  {
    hwNode cpu("cpu", hw::processor);
    cpu.setBusInfo(cpubusinfo);
    cpu.addHint("icon", string("cpu"));
    cpu.claim();
    return core->addChild(cpu);
  }
  else
    return NULL;
}
#endif                       // __i386__ || __alpha__

#ifdef __i386__

/* %ebx may be the PIC register. */
#define cpuid_up(in,a,b,c,d)\
  __asm__ ("xchgl\t%%ebx, %1\n\t" \
  "cpuid\n\t" \
  "xchgl\t%%ebx, %1\n\t" \
  : "=a" (a), "=r" (b), "=c" (c), "=d" (d) \
  : "0" (in))

// Run CPUID leaf 'idx' for CPU 'cpunumber': prefer the kernel's
// /dev/cpu/N/cpuid interface; fall back to executing CPUID in-process
// (which can only query the CPU we happen to run on).
static void cpuid(int cpunumber, unsigned long idx, unsigned long &eax, unsigned long &ebx, unsigned long &ecx, unsigned long &edx)
{
  char cpuname[50];
  int fd = -1;
  unsigned char buffer[16];

  snprintf(cpuname, sizeof(cpuname), "/dev/cpu/%d/cpuid", cpunumber);
  fd = open(cpuname, O_RDONLY);
  if (fd >= 0)
  {
    // The device returns 16 bytes (eax,ebx,ecx,edx) at offset 'idx'.
    lseek(fd, idx, SEEK_CUR);
    memset(buffer, 0, sizeof(buffer));
    if(read(fd, buffer, sizeof(buffer)) == sizeof(buffer))
    {
      // unsigned long is 32-bit on i386, matching the 4-byte register slots.
      eax = (*(unsigned long *) buffer);
      ebx = (*(unsigned long *) (buffer + 4));
      ecx = (*(unsigned long *) (buffer + 8));
      edx = (*(unsigned long *) (buffer + 12));
    }
    close(fd);
  }
  else
    cpuid_up(idx, eax, ebx, ecx, edx);
}

/* Decode Intel TLB and cache info descriptors */
// One byte from CPUID leaf 2; accumulates L1/L2 sizes into the out-params.
static void decode_intel_tlb(int x, long long &l1cache, long long &l2cache)
{
  x &= 0xff;
  switch (x)
  {
    case 0:
      break;
    case 0x1:
      // Instruction TLB: 4KB pages, 4-way set assoc, 32 entries
      break;
    case 0x2:
      // Instruction TLB: 4MB pages, 4-way set assoc, 2 entries
      break;
    case 0x3:
      // Data TLB: 4KB pages, 4-way set assoc, 64 entries
      break;
    case 0x4:
      // Data TLB: 4MB pages, 4-way set assoc, 8 entries
      break;
    case 0x6:
      // 1st-level instruction cache: 8KB, 4-way set assoc, 32 byte line size
      l1cache += 8 * 1024;
      break;
    case 0x8:
      // 1st-level instruction cache: 16KB, 4-way set assoc, 32 byte line size
      l1cache += 16 * 1024;
      break;
    case 0xa:
      // 1st-level data cache: 8KB, 2-way set assoc, 32 byte line size
      l1cache += 8 * 1024;
      break;
    case 0xc:
      // 1st-level data cache: 16KB, 4-way set assoc, 32 byte line size
      l1cache += 16 * 1024;
      break;
    case 0x40:
      // No 2nd-level cache, or if 2nd-level cache exists, no 3rd-level cache
      break;
    case 0x41:
      // 2nd-level cache: 128KB, 4-way set assoc, 32 byte line size
      l2cache = 128 * 1024;
      break;
    case 0x42:
      // 2nd-level cache: 256KB, 4-way set assoc, 32 byte line size
      l2cache = 256 * 1024;
      break;
    case 0x43:
      // 2nd-level cache: 512KB, 4-way set assoc, 32 byte line size
      l2cache = 512 * 1024;
      break;
    case 0x44:
      // 2nd-level cache: 1MB, 4-way set assoc, 32 byte line size
      l2cache = 1024 * 1024;
      break;
    case 0x45:
      // 2nd-level cache: 2MB, 4-way set assoc, 32 byte line size
      l2cache = 2 * 1024 * 1024;
      break;
    case 0x50:
      // Instruction TLB: 4KB and 2MB or 4MB pages, 64 entries
      break;
    case 0x51:
      // Instruction TLB: 4KB and 2MB or 4MB pages, 128 entries
      break;
    case 0x52:
      // Instruction TLB: 4KB and 2MB or 4MB pages, 256 entries
      break;
    case 0x5b:
      // Data TLB: 4KB and 4MB pages, 64 entries
      break;
    case 0x5c:
      // Data TLB: 4KB and 4MB pages, 128 entries
      break;
    case 0x5d:
      // Data TLB: 4KB and 4MB pages, 256 entries
      break;
    case 0x66:
      // 1st-level data cache: 8KB, 4-way set assoc, 64 byte line size
      l1cache += 8 * 1024;
      break;
    case 0x67:
      // 1st-level data cache: 16KB, 4-way set assoc, 64 byte line size
      l1cache += 16 * 1024;
      break;
    case 0x68:
      // 1st-level data cache: 32KB, 4-way set assoc, 64 byte line size
      l1cache += 32 * 1024;
      break;
    case 0x70:
      // Trace cache: 12K-micro-op, 4-way set assoc
      break;
    case 0x71:
      // Trace cache: 16K-micro-op, 4-way set assoc
      break;
    case 0x72:
      // Trace cache: 32K-micro-op, 4-way set assoc
      break;
    case 0x79:
      // 2nd-level cache: 128KB, 8-way set assoc, sectored, 64 byte line size
      l2cache += 128 * 1024;
      break;
    case 0x7a:
      // 2nd-level cache: 256KB, 8-way set assoc, sectored, 64 byte line size
      l2cache += 256 * 1024;
      break;
    case 0x7b:
      // 2nd-level cache: 512KB, 8-way set assoc, sectored, 64 byte line size
      l2cache += 512 * 1024;
      break;
    case 0x7c:
      // 2nd-level cache: 1MB, 8-way set assoc, sectored, 64 byte line size
      l2cache += 1024 * 1024;
      break;
    case 0x82:
      // 2nd-level cache: 256KB, 8-way set assoc, 32 byte line size
      l2cache += 256 * 1024;
      break;
    case 0x83:
      // 2nd-level cache: 512KB, 8-way set assoc 32 byte line size
      l2cache += 512 * 1024;
      break;
    case 0x84:
      // 2nd-level cache: 1MB, 8-way set assoc, 32 byte line size
      l2cache += 1024 * 1024;
      break;
    case 0x85:
      // 2nd-level cache: 2MB, 8-way set assoc, 32 byte line size
      l2cache += 2 * 1024 * 1024;
      break;
    default:
      // unknown TLB/cache descriptor
      break;
  }
}

// Fill in version, HT topology, cache sizes and serial for an Intel CPU.
// 'maxi' is the highest supported standard CPUID leaf.
static bool dointel(unsigned long maxi, hwNode * cpu, int cpunumber = 0)
{
  char buffer[1024];
  unsigned long signature = 0, flags = 0, bflags = 0, eax = 0, ebx = 0, ecx = 0, edx = 0, unused = 0;
  int stepping, model, family;

  if (!cpu)
    return false;

  cpu->addHint("logo", string("intel"));

  if (maxi >= 1)
  {
    // Leaf 1: signature (family/model/stepping) and feature flags.
    cpuid(cpunumber, 1, eax, ebx, ecx, edx);

    signature = eax;
    stepping = eax & 0xf;
    model = (eax >> 4) & 0xf;
    family = (eax >> 8) & 0xf;
    flags = edx;
    bflags = ebx;

    snprintf(buffer, sizeof(buffer), "%d.%d.%d", family, model, stepping);
    cpu->setVersion(buffer);

    // ECX bit 5 = VMX.
    if(ecx & (1 << 5))
      cpu->addCapability("vmx", _("CPU virtualization (Vanderpool)"));

    /* Hyper-Threading Technology */
    if (flags & (1 << 28))
    {
      char buff[20];
      unsigned int nr_ht = (bflags >> 16) & 0xFF;   // logical CPUs per package
      unsigned int phys_id = (bflags >> 24) & 0xFF; // initial APIC id

      snprintf(buff, sizeof(buff), "%d", phys_id);
      cpu->setConfig("id", buff);

      hwNode logicalcpu("logicalcpu", hw::processor);
      logicalcpu.setDescription(_("Logical CPU"));
      logicalcpu.addCapability("logical", _("Logical CPU"));
      logicalcpu.setWidth(cpu->getWidth());
      logicalcpu.claim();
      cpu->addCapability("ht", _("HyperThreading"));

      // Add one child node per logical CPU.
      if(nr_ht>1)
        for(unsigned int i=0; i< nr_ht; i++)
        {
          snprintf(buff, sizeof(buff), "CPU:%d.%d", phys_id, i);
          logicalcpu.setHandle(buff);
          logicalcpu.setPhysId(phys_id, i+1);
          cpu->addChild(logicalcpu);
          cpu->claim();
        }
    }
  }

  if (maxi >= 2)
  {
    /*
     * Decode TLB and cache info
     */
    int ntlb, i;
    long long l1cache = 0, l2cache = 0;

    // AL of leaf 2 tells how many times the leaf must be queried.
    ntlb = 255;
    for (i = 0; i < ntlb; i++)
    {
      cpuid(cpunumber, 2, eax, ebx, ecx, edx);

      ntlb = eax & 0xff;
      decode_intel_tlb(eax >> 8, l1cache, l2cache);
      decode_intel_tlb(eax >> 16, l1cache, l2cache);
      decode_intel_tlb(eax >> 24, l1cache, l2cache);

      // Bit 31 clear means the register holds valid descriptors.
      if ((ebx & 0x80000000) == 0)
      {
        decode_intel_tlb(ebx, l1cache, l2cache);
        decode_intel_tlb(ebx >> 8, l1cache, l2cache);
        decode_intel_tlb(ebx >> 16, l1cache, l2cache);
        decode_intel_tlb(ebx >> 24, l1cache, l2cache);
      }
      if ((ecx & 0x80000000) == 0)
      {
        decode_intel_tlb(ecx, l1cache, l2cache);
        decode_intel_tlb(ecx >> 8, l1cache, l2cache);
        decode_intel_tlb(ecx >> 16, l1cache, l2cache);
        decode_intel_tlb(ecx >> 24, l1cache, l2cache);
      }
      if ((edx & 0x80000000) == 0)
      {
        decode_intel_tlb(edx, l1cache, l2cache);
        decode_intel_tlb(edx >> 8, l1cache, l2cache);
        decode_intel_tlb(edx >> 16, l1cache, l2cache);
        decode_intel_tlb(edx >> 24, l1cache, l2cache);
      }
    }

    // Store the sizes on existing cache nodes, or create new ones.
    if (l1cache != 0)
    {
      hwNode *l1 = getcache(*cpu, 0);
      hwNode *l2 = getcache(*cpu, 1);

      if (l1)
      {
        l1->setSize(l1cache);
        if (l1->getDescription() == "")
          l1->setDescription(_("L1 cache"));
      }
      else
      {
        hwNode cache("cache", hw::memory);
        cache.setSize(l1cache);
        cache.setDescription(_("L1 cache"));
        cpu->addChild(cache);
      }

      if (l2cache != 0)
      {
        if (l2 && (l2cache != 0))
        {
          l2->setSize(l2cache);
          if (l2->getDescription() == "")
            l2->setDescription(_("L2 cache"));
        }
        else
        {
          hwNode cache("cache", hw::memory);
          cache.setSize(l2cache);
          cache.setDescription(_("L2 cache"));
          cpu->addChild(cache);
        }
      }
    }
  }

  if (maxi >= 3)
  {
    // Leaf 3: processor serial number (only on CPUs that expose it).
    cpuid(cpunumber, 3, unused, unused, ecx, edx);

    snprintf(buffer, sizeof(buffer), "%04lX-%04lX-%04lX-%04lX-%04lX-%04lX", signature >> 16, signature & 0xffff, edx >> 16, edx & 0xffff, ecx >> 16, ecx & 0xffff);
    cpu->setSerial(buffer);
  }
  else
    cpu->setSerial("");

  return true;
}

// Fill in version and cache sizes for an AMD CPU, using the extended
// CPUID leaves 0x80000005/0x80000006 for L1/L2 cache sizes.
static bool doamd(unsigned long maxi, hwNode * cpu, int cpunumber = 0)
{
  unsigned long maxei = 0, eax, ebx, ecx, edx;
  long long l1cache = 0, l2cache = 0;
  unsigned int family = 0, model = 0, stepping = 0;
  char buffer[1024];

  if (maxi < 1)
    return false;

  cpu->addHint("logo", string("amd"));

  cpuid(cpunumber, 1, eax, ebx, ecx, edx);
  stepping = eax & 0xf;
  model = (eax >> 4) & 0xf;
  family = (eax >> 8) & 0xf;
  snprintf(buffer, sizeof(buffer), "%d.%d.%d", family, model, stepping);
  cpu->setVersion(buffer);

  // Highest supported extended leaf.
  cpuid(cpunumber, 0x80000000, maxei, ebx, ecx, edx);

  if (maxei >= 0x80000005)
  {
    cpuid(cpunumber, 0x80000005, eax, ebx, ecx, edx);

    // High byte of ECX/EDX holds the cache size in KB.
    l1cache = (ecx >> 24) * 1024;                 // data cache
    l1cache += (edx >> 24) * 1024;                // instruction cache
  }
  if (maxei >= 0x80000006)
  {
    cpuid(cpunumber, 0x80000006, eax, ebx, ecx, edx);

    l2cache = (ecx >> 16) * 1024;
  }

  if (l1cache != 0)
  {
    hwNode *l1 = cpu->getChild("cache:0");
    hwNode *l2 = cpu->getChild("cache:1");

    if (l1)
      l1->setSize(l1cache);
    else
    {
      hwNode newl1("cache", hw::memory);

      newl1.setDescription(_("L1 cache"));
      newl1.setSize(l1cache);

      cpu->addChild(newl1);
    }
    if (l2 && l2cache)
      l2->setSize(l2cache);
    else
    {
      hwNode newl2("cache", hw::memory);

      newl2.setDescription(_("L2 cache"));
      newl2.setSize(l2cache);

      if (l2cache)
        cpu->addChild(newl2);
    }
  }

  return true;
}

// Fill in the version string (family.model.stepping) for a Cyrix CPU.
static bool docyrix(unsigned long maxi, hwNode * cpu, int cpunumber = 0)
{
  unsigned long eax, ebx, ecx, edx;
  unsigned int family = 0, model = 0, stepping = 0;
  char buffer[1024];

  if (maxi < 1)
    return false;

  cpuid(cpunumber, 1, eax, ebx, ecx, edx);
  stepping = eax & 0xf;
  model = (eax >> 4) & 0xf;
  family = (eax >> 8) & 0xf;
  snprintf(buffer, sizeof(buffer), "%d.%d.%d", family, model, stepping);
  cpu->setVersion(buffer);

  return true;
}

// Return true if the given EFLAGS bit can be toggled (classic test for
// detecting CPUID support via the ID flag).
static __inline__ bool flag_is_changeable_p(unsigned int flag)
{
  unsigned int f1, f2;
  __asm__ volatile ("pushfl\n\t"
    "pushfl\n\t"
    "popl %0\n\t"
    "movl %0,%1\n\t"
    "xorl %2,%0\n\t"
    "pushl %0\n\t"
    "popfl\n\t"
    "pushfl\n\t"
    "popl %0\n\t"
    "popfl\n\t":"=&r" (f1), "=&r"(f2):"ir"(flag));
  return ((f1 ^ f2) & flag) != 0;
}

// CPUID is supported iff the ID flag (EFLAGS bit 21, 0x200000) is writable.
static bool haveCPUID()
{
  return flag_is_changeable_p(0x200000);
}

/*
 * Estimate CPU MHz routine by <NAME> <<EMAIL>>
 * Small changes by <NAME> <<EMAIL>>
 *
 */
// Read the CPU's time-stamp counter (0x0f 0x31 = RDTSC).
static __inline__ unsigned long long int rdtsc()
{
  unsigned long long int x;
  __asm__ volatile (".byte 0x0f, 0x31":"=A" (x));
  return x;
}

// Estimate clock frequency by counting TSC cycles across a usleep().
// Returns MHz (cycles per microsecond), or 0 when there is no TSC.
static float estimate_MHz(int cpunum, long sleeptime = 250000)
{
  struct timezone tz;
  struct timeval tvstart, tvstop;
  unsigned long long int cycles[2];               /* gotta be 64 bit */
  float microseconds;                             /* total time taken */
  unsigned long eax, ebx, ecx, edx;
  double freq = 1.0f;

  /*
   * Make sure we have a TSC (and hence RDTSC)
   */
  cpuid(cpunum, 1, eax, ebx, ecx, edx);
  if ((edx & (1 << 4)) == 0)
  {
    return 0;                                     // can't estimate frequency
  }

  memset(&tz, 0, sizeof(tz));

  /*
   * get this function in cached memory
   */
  // NOTE(review): gettimeofday/rdtsc are each issued twice around the sleep —
  // presumably to warm caches so the second (kept) reading is cleaner.
  gettimeofday(&tvstart, &tz);
  cycles[0] = rdtsc();
  gettimeofday(&tvstart, &tz);

  /*
   * we don't trust that this is any specific length of time
   */
  usleep(sleeptime);
  gettimeofday(&tvstop, &tz);
  cycles[1] = rdtsc();
  gettimeofday(&tvstop, &tz);

  microseconds = (tvstop.tv_sec - tvstart.tv_sec) * 1000000 + (tvstop.tv_usec - tvstart.tv_usec);

  return (float) (cycles[1] - cycles[0]) / (microseconds / freq);
}

// Average several frequency estimates with increasing sleep times.
static float average_MHz(int cpunum, int tries = 2)
{
  float frequency = 0;

  for (int i = 1; i <= tries; i++)
    frequency += estimate_MHz(cpunum, i * 150000);

  if (tries > 0)
    return frequency / (float) tries;
  else
    return 0;
}

// Round an estimated frequency to the nearest multiple of 50 MHz.
static long round_MHz(float fMHz)
{
  long MHz = (long)fMHz;

  if ((MHz % 50) > 15)
    return ((MHz / 50) * 50) + 50;
  else
    return ((MHz / 50) * 50);
}

// Walk every CPU node, dispatch to the vendor-specific prober (Intel/AMD/
// Cyrix, keyed on the EBX vendor signature from leaf 0) and estimate the
// clock when the CPU node has no size yet.
bool scan_cpuid(hwNode & n)
{
  unsigned long maxi, ebx, ecx, edx;
  hwNode *cpu = NULL;
  int currentcpu = 0;

  if (!haveCPUID())
    return false;

  while ((cpu = getcpu(n, currentcpu)))
  {
    cpu->claim(true);                             // claim the cpu and all its children
    cpuid(currentcpu, 0, maxi, ebx, ecx, edx);
    maxi &= 0xffff;

    switch (ebx)
    {
      case 0x756e6547:                            /* Intel */
        dointel(maxi, cpu, currentcpu);
        break;
      case 0x68747541:                            /* AMD */
        doamd(maxi, cpu, currentcpu);
        break;
      case 0x69727943:                            /* Cyrix */
        docyrix(maxi, cpu, currentcpu);
        break;
      default:
        return false;
    }

    cpu->claim(true);                             // claim the cpu and all its children
    if (cpu->getSize() == 0)
      cpu->setSize((unsigned long long) (1000000uL * round_MHz(average_MHz(currentcpu))));

    currentcpu++;
  }

  return true;
}

#else

#ifdef __alpha__

// AMASK feature bits used to identify Alpha implementations.
#define BWX (1 << 0)
#define FIX (1 << 1)
#define CIX (1 << 2)
#define MVI (1 << 8)
#define PAT (1 << 9)
#define PMI (1 << 12)

// Identify Alpha CPUs via the IMPLVER and AMASK instructions.
bool scan_cpuid(hwNode & n)
{
  hwNode *cpu = NULL;
  int currentcpu = 0;
  unsigned long ver = 0, mask = 0;

  while (cpu = getcpu(n, currentcpu))
  {
    asm("implver %0":"=r"(ver));
    asm("amask %1, %0": "=r"(mask):"r"(-1));

    cpu->setVendor("Digital Equipment Corporation");
    cpu->setProduct("Alpha");
    cpu->setWidth(64);

    // A clear bit in ~mask means the feature is implemented.
    if ((~mask) & BWX)
      cpu->addCapability("BWX");
    if ((~mask) & FIX)
      cpu->addCapability("FIX");
    if ((~mask) & CIX)
      cpu->addCapability("CIX");
    if ((~mask) & MVI)
      cpu->addCapability("MVI");
    if ((~mask) & PAT)
      cpu->addCapability("PAT");
    if ((~mask) & PMI)
      cpu->addCapability("PMI");

    // Map implementation version + feature mask to the marketing name.
    switch (ver)
    {
      case 0:
        cpu->setVersion("EV4");
        break;
      case 1:
        switch (~mask)
        {
          case 0:
            cpu->setVersion("EV5");
            break;
          case BWX:
            cpu->setVersion("EV56");
            break;
          case BWX | MVI:
            cpu->setVersion("PCA56");
            break;
          default:
            cpu->setVersion("EV5 unknown");
        }
        break;
      case 2:
        switch (~mask)
        {
          case BWX | FIX | MVI | PAT:
            cpu->setVersion("EV6");
            break;
          case BWX | FIX | MVI | PAT | CIX:
            cpu->setVersion("EV67");
            break;
          case BWX | FIX | MVI | PAT | CIX | PMI:
            cpu->setVersion("EV68");
            break;
          default:
            cpu->setVersion("EV6 unknown");
        }
        break;
      case 3:
        switch (~mask)
        {
          case BWX | FIX | MVI | PAT | CIX | PMI:
            cpu->setVersion("EV7x");
            break;
          default:
            cpu->setVersion("EV7 unknown");
        }
        break;
    }

    currentcpu++;
  }

  return true;
}

#else

// No CPUID-style probing available on this architecture: nothing to do.
bool scan_cpuid(hwNode & n)
{
  return true;
}
#endif                       /* __alpha__ */
#endif                       /* __i386__ */
<filename>src/main/java/it/polimi/se2019/rmi/ViewFacadeInterfaceRMIServer.java
package it.polimi.se2019.rmi;

import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.List;

/**
 * RMI facade exposed by a client view to the server: every method either
 * pushes information to the client ("send*") or asks the user for a choice
 * ("choose*"/"get*") and blocks until an answer arrives.
 */
public interface ViewFacadeInterfaceRMIServer
{
  /**
   * Send a string message to a client.
   *
   * @param message the text to display to the client
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   */
  void sendGenericMessage(String message) throws UserTimeoutException, RemoteException;

  /**
   * @return the name of the client
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   */
  String getName() throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return a string specifying the chosen action
   * @param state the player's state which defines the available actions
   */
  String chooseAction(String state) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return index of the power up card to discard
   * @param powerUps the list of power ups available
   */
  int chooseSpawnLocation(List<String> powerUps) throws UserTimeoutException, RemoteException;

  /**
   * Choose map type for the match.
   *
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return an int specifying the map chosen
   */
  int chooseMap() throws UserTimeoutException, RemoteException;

  /**
   * Choose how many players will be in the game.
   *
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return an int specifying the number of players for the game
   */
  int chooseNumberOfPlayers() throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return chosen weapon name
   * @param weapons the list of available weapons
   */
  String chooseWeapon(List<String> weapons) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @param possibleTargets is a list of the players who can be targeted(their names)
   * @return the name of the chosen target
   */
  String chooseTargets(List<String> possibleTargets) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @param weapons that can be reloaded
   * @return the name of the weapon to reload
   */
  String chooseWeaponToReload(List<String> weapons) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return a list of integers indicating which cards from the player's inventory to use when reloading
   * @param powerUps the list of available power ups
   */
  List<Integer> choosePowerUpCardsForReload(List<String> powerUps) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return the integer specifying the chosen effect
   * @param availableEffects the list of available effects
   */
  Integer chooseIndex(List<String> availableEffects) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return int indicating which item to pick up from those available
   */
  int chooseItemToGrab() throws UserTimeoutException, RemoteException;

  /**
   * Choose whether to use a firing mode.
   *
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return the player's choice
   * @param description the description of the choice
   */
  Boolean chooseFiringMode(String description) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return the player's choice
   * @param description the description of the choice
   */
  Boolean chooseBoolean(String description) throws UserTimeoutException, RemoteException;

  /**
   * Choose a room from those proposed.
   *
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return the identifier of the chosen room
   * @param rooms the list of rooms to choose from
   */
  String chooseRoom(List<String> rooms) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @param targettableSquareCoordinates the coordinates of all targettable squares
   * @return the coordinates of one chosen square
   */
  List<Integer> chooseTargetSquare(List<List<Integer>> targettableSquareCoordinates) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @return 0 for north, 1 for east, 2 for south or 3 for west
   * @param possibleDirections the list of available directions
   */
  Integer chooseDirection(List<Integer> possibleDirections) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @param mapInfo specifies the content of all map squares
   */
  void sendMapInfo(List<ArrayList<ArrayList<String>>> mapInfo) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @param playerInfo contains the damage, marks and number of deaths
   */
  void sendPlayerInfo(List<ArrayList<String>> playerInfo) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @param killScoreBoardInfo contains information about scored kills and double kills
   */
  void sendKillScoreBoardInfo(List<ArrayList<String>> killScoreBoardInfo) throws UserTimeoutException, RemoteException;

  /**
   * @throws UserTimeoutException if the user takes too long to make a choice or disconnects
   * @throws RemoteException if there is an error with the RMI connection
   * @param characterInfo contains information about player's characters
   */
  void sendCharacterInfo(List<String> characterInfo) throws UserTimeoutException, RemoteException;
}
Sharad Pawar, a workaholic by nature, manages to remain at the centre of state politics even as he remains bedridden in his bungalow, 'Silver Oak', on Bhulabhai Desai Road in Mumbai. While recovering from a surgery on his leg, Pawar recently managed to stall a defection attempt by his lieutenant Ganesh Naik of Navi Mumbai. Pawar persuaded the Bharatiya Janata Party to hold off on inducting Naik, a senior NCP leader who desperately wanted to join the BJP to maintain his political influence in Navi Mumbai. Recently, Naik missed the Thane NCP district leaders' meeting on the eve of local self-government elections. When Amit Shah, the BJP president, visited Pawar to check on his health, Pawar told Shah not to induct Naik into the BJP for the moment. Accordingly, Naik's entry into the BJP has been postponed indefinitely. Few people know what transpired between Shah and Pawar, but the incident provided proof that Pawar is still a formidable force in state politics. While recuperating from the surgery, Pawar is meeting field experts and political analysts, as well as his party members, to understand the real reason behind the decimation of the NCP in the Lok Sabha and state assembly elections. In the first week of December, the 73-year-old Pawar sustained a leg injury from a fall at his Delhi residence. He was immediately brought to Breach Candy Hospital in Mumbai, where he underwent surgery. Sunil Tatkare, state NCP president, said that Pawar is healing well and is likely to address the party workers' meeting on February 6 and 7 in Pune. "We have organised a workshop for 5,000 party leaders and elected representatives. We will decide on the next course of action and strategy in this workshop. We will emerge as the strongest opposition party in the state," Tatkare added.
package zapmixin import ( "github.com/fox-one/mixin-sdk-go" "go.uber.org/zap/zapcore" ) type Handler struct { conversations []string client MixinClient levels []zapcore.Level async bool formatter func(zapcore.Entry) string filter func(zapcore.Entry) bool afterFunc func(zapcore.Entry, *mixin.MessageRequest, error) error } func New(client *mixin.Client, conversations []string, opts ...Option) (*Handler, error) { return newHandler(client, conversations, opts...) } func newHandler(client MixinClient, conversations []string, opts ...Option) (*Handler, error) { h := &Handler{ client: client, conversations: conversations, levels: getLevelThreshold(zapcore.WarnLevel), async: true, } if err := h.Apply(opts...); err != nil { return nil, err } return h, nil } func (h *Handler) Apply(opts ...Option) error { for _, opt := range opts { if err := opt(h); err != nil { return err } } return nil } func (h *Handler) Client() *mixin.Client { return h.client.(*mixin.Client) } func (h *Handler) Hook() func(zapcore.Entry) error { return func(e zapcore.Entry) error { if !h.levelMatched(e.Level) { return nil } if h.filter != nil && !h.filter(e) { return nil } if h.async { go h.sendMessage(e) return nil } return h.sendMessage(e) } } func (h *Handler) levelMatched(l zapcore.Level) bool { for _, level := range h.levels { if level == l { return true } } return false }
from flask import Flask, redirect, url_for, request

app = Flask(__name__)
app.config['DEBUG'] = True


@app.route("/users", defaults={'username': 'Guest'})
@app.route("/users/<username>")
def index(username):
    """Render the users page for the given username ('Guest' by default)."""
    return f"Users Content {username}"


@app.route("/find/<user>")
def show_user(user):
    """Redirect to the users page, tagging the looked-up user as found."""
    target = url_for('index', username=user + ' user found')
    return redirect(target)


if __name__ == '__main__':
    app.run()
<reponame>ShipChain/dashboard
import WalletInterface from "./WalletInterface"

/**
 * HD (hierarchical-deterministic) wallet adapter: wraps a derivation path,
 * a public key and externally supplied transaction/message signers, and
 * delegates the actual signing to the WalletInterface base class.
 */
class HDWalletInterface extends WalletInterface {
  constructor(
    readonly path,
    pubkey,
    readonly isHardware: boolean,
    identifier: string,
    readonly txSigner,
    readonly msgSigner,
  ) {
    // NOTE(review): the second super() argument is hard-coded to true even
    // though isHardware is captured on this instance — confirm whether super
    // should receive isHardware instead.
    super(pubkey, true, identifier)
  }

  // Sign a transaction by delegating to the base class with this wallet's
  // transaction signer.
  signTransaction(txParams) {
    return super.signTransaction(txParams, this.txSigner)
  }

  // Sign an arbitrary message with this wallet's message signer.
  signMessage(msg) {
    return super.signMessage(msg, this.msgSigner)
  }
}

export default HDWalletInterface
//deshalb musste ich hier viele verschiedene abfragen machen, wie meine variable nachricht ausschaut static int leseZahl(String nachricht) { System.out.print(nachricht+" "); int zahl = 0; do{ try{ zahl = input.nextInt(); input.nextLine(); if(!nachricht.contains("(1/0)")&&((!nachricht.contains("Geld")&&zahl>geld)||zahl<=0)) throw new java.util.InputMismatchException(); } catch(java.util.InputMismatchException err){ zahl = 0; System.out.println("Invalid input, try again"); input.nextLine(); } }while(zahl<=0&&!nachricht.contains("(1/0)")); return zahl; }
def apple_flavor(n: int, l: int) -> int:
    """Return the maximum remaining flavor after eating one apple.

    Apples have flavors l, l+1, ..., l+n-1 and one must be eaten; the best
    strategy is to eat the apple whose flavor is closest to zero, keeping
    the sum of the rest as large as possible.  Assumes n >= 1.

    Args:
        n: number of apples.
        l: flavor of the first apple.

    Returns:
        Sum of all flavors minus the flavor closest to zero.
    """
    total = n * l + n * (n - 1) // 2  # sum of the arithmetic series l..l+n-1
    if l <= 0 <= l + n - 1:
        return total          # a zero-flavor apple exists: eating it changes nothing
    if l > 0:
        return total - l      # all flavors positive: eat the smallest, l
    return total - (l + n - 1)  # all negative: eat the largest (closest to 0)


def main() -> None:
    """Read 'n l' from stdin and print the answer."""
    n, l = map(int, input().split())
    print(apple_flavor(n, l))


if __name__ == "__main__":
    main()
def preprocess_output(self, image, outputs):
    """Convert raw face-detector output into pixel crop boxes and a face crop.

    Args:
        self: object carrying ``threshold``, ``image_width`` and
            ``image_height`` attributes.
        image: source frame, indexable as image[y0:y1, x0:x1]
            (e.g. a numpy array).
        outputs: raw network output; detections are iterated from
            ``outputs[0][0]``, each row laid out as
            [_, _, confidence, xmin, ymin, xmax, ymax] with coordinates
            normalized to [0, 1].

    Returns:
        tuple: (crop_coords, cropped_face) where crop_coords is a list of
        [xmin, ymin, xmax, ymax] pixel boxes for every detection at or above
        ``self.threshold``, and cropped_face is the crop for the last such
        detection, or None when nothing clears the threshold.
        (Previously this raised NameError on zero detections because the
        crop was computed after the loop from unbound coordinates.)
    """
    crop_coords = []
    cropped_face = None  # stays None when no detection clears the threshold
    for bounding_box in outputs[0][0]:
        if bounding_box[2] >= self.threshold:
            # Scale normalized coordinates to pixel positions.
            xmin = int(bounding_box[3] * self.image_width)
            ymin = int(bounding_box[4] * self.image_height)
            xmax = int(bounding_box[5] * self.image_width)
            ymax = int(bounding_box[6] * self.image_height)
            crop_coords.append([xmin, ymin, xmax, ymax])
            # Crop inside the loop so the variables are always bound;
            # as before, the last accepted detection wins.
            cropped_face = image[ymin: ymax, xmin: xmax]
    log.info('[ Face Detector ] Cropped faces extracted')
    return crop_coords, cropped_face
<gh_stars>0
package com.github.jgzl.gw.gateway.spi.log;

import com.github.jgzl.gw.common.core.spi.Join;
import com.github.jgzl.gw.common.core.utils.JacksonUtil;
import com.github.jgzl.gw.common.gateway.domain.GatewayLog;

import lombok.extern.slf4j.Slf4j;

/**
 * RecordLogService SPI implementation that writes each gateway access log
 * entry as one JSON line via the "gateway" logger topic.
 */
@Slf4j(topic = "gateway")
@Join
public class LocalFileRecordLogService implements RecordLogService {

    @Override
    public void recordLog(GatewayLog gatewayLog) {
        // Serialize the whole log entry to JSON and emit it at INFO level.
        log.info(JacksonUtil.toJsonString(gatewayLog));
    }
}
/**
 * This test tests the successful run of the runGetCommand method, retrieving three objects,
 * and then returning said objects back to the end user.
 *
 * This is also testing the construction of the required MatchaGetQuery object.
 */
@Test
public void testRunGetCommand() {
    // Fixture: the three rows the mocked table will hand back.
    List<HashMap> threeGreenEggs = new ArrayList<>() {{
        add(new HashMap<>() {{ put("Name", "Green Egg A"); }});
        add(new HashMap<>() {{ put("Name", "Green Egg B"); }});
        add(new HashMap<>() {{ put("Name", "Green Egg C"); }});
    }};
    MatchaDbResponseObject getSuccessfulResponseObject
        = new MatchaDbResponseObject("Retrieval Successful", threeGreenEggs);

    // Stub the table so any get-query returns the fixture rows.
    when(matchaDbTable.getData(any(MatchaGetQuery.class))).thenReturn(threeGreenEggs);

    // The service response must wrap the rows with the success message.
    MatchaDbTestUtils.compareResponseObjects(
        getSuccessfulResponseObject,
        matchaDbRequestService.runGetCommand(getRequestObject));

    // Verify the service built the expected query from the request object.
    verify(matchaDbTable).getData(matchaGetQueryCaptor.capture());
    MatchaGetQuery capturedMatchaGetQuery = matchaGetQueryCaptor.getValue();
    Assert.assertTrue(capturedMatchaGetQuery.toString().equals(getGreenEggsQuery.toString()));
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ro.nextreports.designer.action.report.layout.cell;

import ro.nextreports.engine.util.ObjectCloner;
import ro.nextreports.engine.band.BandElement;
import ro.nextreports.engine.band.ImageBandElement;
import ro.nextreports.engine.band.ImageColumnBandElement;
import ro.nextreports.engine.ReportLayout;

import javax.swing.*;
import javax.imageio.ImageIO;

import ro.nextreports.designer.Globals;
import ro.nextreports.designer.ImageResizePanel;
import ro.nextreports.designer.LayoutHelper;
import ro.nextreports.designer.ReportGrid;
import ro.nextreports.designer.action.undo.LayoutEdit;
import ro.nextreports.designer.grid.Cell;
import ro.nextreports.designer.grid.SelectionModel;
import ro.nextreports.designer.grid.event.SelectionModelEvent;
import ro.nextreports.designer.ui.BaseDialog;
import ro.nextreports.designer.util.I18NSupport;

import java.awt.event.ActionEvent;
import java.awt.image.BufferedImage;
import java.io.InputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Action that lets the user resize the image(s) in the currently selected
 * grid cells. The current image size is computed on a worker thread, the
 * resize dialog runs on the EDT, and the change is recorded as an undoable
 * layout edit.
 *
 * User: mihai.panaitescu
 * Date: 23-Mar-2010
 * Time: 11:07:35
 */
public class ImageSizeAction extends AbstractAction {

    public ImageSizeAction() {
        super();
        putValue(Action.NAME, I18NSupport.getString("size.image.action.name"));
    }

    public void actionPerformed(final ActionEvent event) {
        final ReportGrid grid = Globals.getReportGrid();
        SelectionModel selectionModel = grid.getSelectionModel();
        List<Cell> cells = selectionModel.getSelectedCells();

        // Collect the image elements from the selection.
        final List<BandElement> olds = new ArrayList<BandElement>();
        for (Cell cell : cells) {
            BandElement be = grid.getBandElement(cell.getRow(), cell.getColumn());
            if ((be instanceof ImageBandElement) || (be instanceof ImageColumnBandElement)) {
                olds.add(be);
            }
        }

        // Guard: nothing image-like selected -> olds.get(0) below would throw.
        if (olds.isEmpty()) {
            return;
        }

        // Snapshot the layout before any change so we can build an undo edit.
        final ReportLayout oldLayout = ObjectCloner.silenceDeepCopy(LayoutHelper.getReportLayout());
        Thread executorThread = new Thread(new Runnable() {

            public void run() {
                // Compute the common size of all selected images off the EDT;
                // {0,0} means "sizes differ" (or unknown) and the dialog
                // starts without a preset.
                int[] size = new int[] {0,0};
                for (int i=0, len = olds.size(); i<len; i++) {
                    BandElement oldElement = olds.get(i);
                    int[] size2;
                    if (oldElement instanceof ImageBandElement) {
                        size2 = getRealImageSize(((ImageBandElement)oldElement).getImage());
                    } else {
                        // for ImageColumnBandElement we put a static actual size (we do not go to database to compute it)
                        size2 = new int[] {50,50};
                    }
                    if (i == 0) {
                        size = size2;
                    } else {
                        if ((size[0] != size2[0]) || (size[1] != size2[1])) {
                            size[0] = size[1] = 0;
                            break;
                        }
                    }
                }

                final int[] s = size;
                SwingUtilities.invokeLater(new Runnable() {

                    public void run() {
                        // Show the resize dialog on the EDT.
                        final ImageResizePanel panel = new ImageResizePanel(s, olds.get(0));
                        final BaseDialog dialog = new BaseDialog(panel, I18NSupport.getString("size.image.action.name"));
                        dialog.pack();
                        dialog.setLocationRelativeTo(Globals.getMainFrame());
                        dialog.setVisible(true);
                        if (!dialog.okPressed()) {
                            return;
                        }

                        // Apply the chosen size to every selected image element.
                        for (int i = 0, len = olds.size(); i < len; i++) {
                            BandElement be = olds.get(i);
                            if (be instanceof ImageBandElement) {
                                ImageBandElement ibe = (ImageBandElement)be;
                                ibe.setWidth(panel.getImageWidth());
                                ibe.setHeight(panel.getImageHeight());
                            } else {
                                ImageColumnBandElement icbe = (ImageColumnBandElement)be;
                                icbe.setWidth(panel.getImageWidth());
                                icbe.setHeight(panel.getImageHeight());
                            }
                        }

                        // Refresh the properties panel and register the undo edit.
                        SelectionModelEvent selectionEvent = new SelectionModelEvent(Globals.getReportGrid().getSelectionModel(), false);
                        Globals.getReportDesignerPanel().getPropertiesPanel().selectionChanged(selectionEvent);

                        ReportLayout newLayout = ObjectCloner.silenceDeepCopy(LayoutHelper.getReportLayout());
                        Globals.getReportUndoManager().addEdit(new LayoutEdit(oldLayout, newLayout, I18NSupport.getString("size.image.action.name")));
                    }
                });
            }
        }, "NEXT : " + getClass().getSimpleName());
        executorThread.start();
    }

    /**
     * Read the real pixel size of a classpath image resource.
     *
     * @param image resource name (looked up as "/" + image on the classpath)
     * @return {width, height}, or {0, 0} when the resource is missing or
     *         unreadable
     */
    private int[] getRealImageSize(String image) {
        InputStream is = getClass().getResourceAsStream("/" + image);
        int[] size = new int[2];
        size[0] = 0;
        size[1] = 0;
        // getResourceAsStream returns null for a missing resource; without
        // this guard ImageIO.read(null)/is.close() would throw.
        if (is == null) {
            return size;
        }
        try {
            BufferedImage img = ImageIO.read(is);
            if (img != null) {
                size[0] = img.getWidth();
                size[1] = img.getHeight();
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        } finally {
            try {
                is.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return size;
    }
}
async def send_terminal_stdout_to_backend(self):
    """Pump terminal output from the local PTY master to the backend websocket.

    Loops while ``self.run_sending_thread`` is set, reading from the PTY
    master fd and forwarding each chunk as a msgpack-encoded shell message
    over ``self.client``. Returns -1 immediately when the websocket is not
    connected; otherwise returns when the session ends or the loop flag is
    cleared.
    """
    if not self.ws_connected:
        log.debug("leaving send_terminal_stdout_to_backend")
        return -1
    while self.run_sending_thread:
        try:
            # Blocking read of up to 100 KiB from the PTY master.
            shell_stdout = os.read(self.master, 102400)
            resp_header = {
                "proto": 1,
                "typ": MESSAGE_TYPE_SHELL_COMMAND,
                "sid": self.sid,
            }
            resp_props = {"status": 1}
            response = {
                "hdr": resp_header,
                "props": resp_props,
                "body": shell_stdout,
            }
            await self.client.send(msgpack.packb(response, use_bin_type=True))
        except TypeError as type_error:
            log.error(f"send_terminal_stdout_to_backend: {type_error}")
        except IOError as io_error:
            if io_error.errno == 5:  # errno 5 == EIO: the PTY slave side closed
                log.info("Session closed.")
                # Stop pumping: once the PTY is gone, every further os.read()
                # raises EIO again, so continuing would spin in a tight loop.
                break
            log.error(f"send_terminal_stdout_to_backend: {io_error}")
from aiohttp_admin2.exceptions import AdminException

__all__ = [
    'ClientException',
    'InstanceDoesNotExist',
    'FilterException',
    'BadParameters',
    'CURSOR_PAGINATION_ERROR_MESSAGE',
]


# Reused wherever cursor pagination is requested without a primary-key sort.
CURSOR_PAGINATION_ERROR_MESSAGE = \
    "Pagination by cursor available only together with sorting by primary key"


class ClientException(AdminException):
    """The base exception for all client-level errors."""


class InstanceDoesNotExist(ClientException):
    """Manager can't return an instance because it does not exist."""


class FilterException(ClientException):
    """Manager can't apply a filter to the query."""


class BadParameters(AdminException):
    """Bad arguments were passed to a method."""
<filename>src/downloadArtifact.ts import { getOctokit } from "@actions/github"; import { exec } from "@actions/exec"; import axios from "axios"; import { createWriteStream } from "fs"; import { Readable } from "stream"; import { fileSync } from "tmp"; type Api = ReturnType<typeof getOctokit>; interface DownloadArgs { api: Api; workflowRunId: number; artifactName: string; path: string; owner: string; repo: string; } export default async function downloadArtifact({ api, workflowRunId, artifactName, path, owner, repo, }: DownloadArgs) { const url = await resolveArchiveDownloadUrl({ api, workflowRunId, artifactName, owner, repo, }); const archive = fileSync({ postfix: ".zip" }); await downloadFile(url, archive.name); await extractArchive(archive.name, path); } interface ResolveArgs { api: Api; workflowRunId: number; artifactName: string; repo: string; owner: string; } export function resolveArchiveDownloadUrl({ api, workflowRunId, artifactName, repo, owner, }: ResolveArgs): Promise<string> { return api.actions .listWorkflowRunArtifacts({ owner, repo, run_id: workflowRunId, }) .then(({ data: { artifacts } }) => { const artifact = artifacts.find((a) => a.name === artifactName); if (!artifact) { throw Error(`No artifact "${artifactName}" found`); } return artifact.id; }) .then((artifact_id) => api.actions.downloadArtifact({ archive_format: "zip", artifact_id, repo, owner, request: { redirect: "manual" }, }) ) .then((response) => { const url = response.headers.location; if (!url) throw Error("No download url specified"); return url; }); } export function downloadFile(url: string, destination: string): Promise<void> { const writer = createWriteStream(destination); return axios({ method: "get", url: url, responseType: "stream", }).then((response) => { return new Promise<void>((resolve, reject) => { const stream = response.data as Readable; stream.pipe(writer); writer.on("error", (err) => { writer.close(); reject(err); }); writer.on("close", () => void resolve()); }); }); } 
/**
 * Unzips `archivePath` into `destinationFolder` using the system `unzip`
 * binary (quiet mode, overwriting existing files). Throws when unzip
 * reports a non-zero exit code.
 */
export async function extractArchive(
  archivePath: string,
  destinationFolder: string
): Promise<void> {
  const unzipArgs = ["-qo", archivePath, "-d", destinationFolder];
  const exitCode = await exec("unzip", unzipArgs);
  if (exitCode !== 0) {
    throw Error("unzip exited with code " + exitCode);
  }
}
/** * This class tabulates the frequency associated with * the integers presented to it via the collect() method * Every value presented is interpreted as an integer * For every value presented a count is maintained. * There could be space/time performance issues if * the number of different values presented is large. * <p> * This class can be useful for tabulating a * discrete histogram over the values (integers) presented. * * @author rossetti */ public class IntegerFrequency { /** * A Cell represents a value, count pairing */ private Map<Cell, Cell> myCells; /** * Collects statistical information */ protected Statistic myStatistic; /** * Used as a temporary cell during tabulation */ private Cell myTemp; /** * The smallest value allowed. Any * values &lt; to this value will be counted * in the underflow count */ private int myLowerLimit; /** * The largest value allowed. Any * values &gt; to this value will be counted * in the overflow count */ private int myUpperLimit; /** * Counts of values located below first bin. */ private int myUnderFlowCount; /** * Counts of values located above last bin. 
*/ private int myOverFlowCount; protected String myName; /** * Can tabulate any integer value */ public IntegerFrequency() { this(Integer.MIN_VALUE, Integer.MAX_VALUE, null); } /** Can tabulate any integer value * * @param name a name for the instance */ public IntegerFrequency(String name) { this(Integer.MIN_VALUE, Integer.MAX_VALUE, name); } /** * * @param lowerLimit the defined lower limit of the integers, values less than this are not tabulated * @param upperLimit the defined upper limit of the integers, values less than this are not tabulated */ public IntegerFrequency(int lowerLimit, int upperLimit) { this(lowerLimit, upperLimit, null); } /** * * @param lowerLimit the defined lower limit of the integers, values less than this are not tabulated * @param upperLimit the defined upper limit of the integers, values less than this are not tabulated * @param name a name for the instance */ public IntegerFrequency(int lowerLimit, int upperLimit, String name) { if (lowerLimit >= upperLimit) { throw new IllegalArgumentException("The lower limit must be < the upper limit"); } myName = name; myLowerLimit = lowerLimit; myUpperLimit = upperLimit; myStatistic = new Statistic(name); myTemp = new Cell(); myCells = new HashMap<Cell, Cell>(); } /** * @return the assigned name */ public final String getName() { return myName; } /** * @param name the name to assign */ public final void setName(String name) { myName = name; } /** * @param intArray collects on the values in the array */ public final void collect(int[] intArray) { Objects.requireNonNull(intArray, "The array was null"); for (int i : intArray) { collect(i); } } /** * Tabulates the count of the number of i's * presented. 
* * @param i the presented integer */ public void collect(int i) { myStatistic.collect(i); if (i < myLowerLimit) { myUnderFlowCount = myUnderFlowCount + 1; } if (i > myUpperLimit) { myOverFlowCount = myOverFlowCount + 1; } // myLowerLimit <= x <= myUpperLimit myTemp.myValue = i; Cell c = myCells.get(myTemp); if (c == null) { c = new Cell(i); myCells.put(c, c); } else { c.myCount = c.myCount + 1; } } /** * * @param i casts the double down to an int */ public void collect(double i){ collect((int)i); } /** * * @param array casts the doubles to ints */ public void collect(double[] array){ Objects.requireNonNull(array, "The array was null"); for (double i : array) { collect(i); } } /** * Resets the statistical collection */ public void reset() { myOverFlowCount = 0; myUnderFlowCount = 0; myStatistic.reset(); myCells.clear(); } /** * The number of observations that fell below the first bin's lower limit * * @return number of observations that fell below the first bin's lower limit */ public final int getUnderFlowCount() { return (myUnderFlowCount); } /** * The number of observations that fell past the last bin's upper limit * * @return number of observations that fell past the last bin's upper limit */ public final int getOverFlowCount() { return (myOverFlowCount); } /** * Returns an array of size getNumberOfCells() containing * the values increasing by value * * @return the array of values observed or an empty array */ public final int[] getValues() { if (myCells.isEmpty()) { return new int[0]; } SortedSet<Cell> cellSet = getCells(); int[] v = new int[myCells.size()]; int i = 0; for (Cell c : cellSet) { v[i] = c.myValue; i++; } return v; } /** * Returns an array of size getNumberOfCells() containing * the frequencies by value * * @return the array of frequencies observed or an empty array */ public final int[] getFrequencies() { if (myCells.isEmpty()) { return new int[0]; } SortedSet<Cell> cellSet = getCells(); int[] v = new int[myCells.size()]; int i = 0; for (Cell c : 
cellSet) { v[i] = c.myCount; i++; } return v; } /** * Returns an array of size getNumberOfCells() containing * the proportion by value * * @return the array of proportions observed or an empty array */ public final double[] getProportions() { if (myCells.isEmpty()) { return new double[0]; } SortedSet<Cell> cellSet = getCells(); double[] v = new double[myCells.size()]; int i = 0; for (Cell c : cellSet) { v[i] = c.myProportion; i++; } return v; } /** * Returns the cumulative frequency up to an including i * * @param i the integer for the desired frequency * @return the cumulative frequency */ public final int getCumulativeFrequency(int i) { if (myCells.isEmpty()) { return 0; } SortedSet<Cell> cellSet = getCells(); int sum = 0; for (Cell c : cellSet) { if (c.myValue <= i) { sum = sum + c.myCount; } else { break; } } return sum; } /** * Returns the cumulative proportion up to an including i * * @param i the integer for the desired proportion * @return the cumulative proportion */ public final double getCumulativeProportion(int i) { if (myCells.isEmpty()) { return 0; } double n = getTotalCount(); return (getCumulativeFrequency(i) / n); } /** * Returns a n by 2 array of value, frequency * pairs where n = getNummberOfCells() * * @return the array or an empty array */ public final int[][] getValueFrequencies() { if (myCells.isEmpty()) { return new int[0][0]; } SortedSet<Cell> cellSet = getCells(); int[][] v = new int[myCells.size()][2]; int i = 0; for (Cell c : cellSet) { v[i][0] = c.myValue; v[i][1] = c.myCount; i++; } return v; } /** * Returns a 2 by n array of value, proportion pairs * where n = getNumberOfCells() * row 0 is the values * row 1 is the proportions * * @return the array or an empty array */ public final double[][] getValueProportions() { if (myCells.isEmpty()) { return new double[0][0]; } SortedSet<Cell> cellSet = getCells(); double[][] v = new double[myCells.size()][2]; int i = 0; for (Cell c : cellSet) { v[0][i] = c.myValue; v[1][i] = c.myProportion; 
i++; } return v; } /** * Returns a 2 by n array of value, cumulative proportion pairs * where n = getNumberOfCells() * row 0 is the values * row 1 is the cumulative proportions * * @return the array or an empty array */ public final double[][] getValueCumulativeProportions() { if (myCells.isEmpty()) { return new double[0][0]; } SortedSet<Cell> cellSet = getCells(); double[][] v = new double[myCells.size()][2]; int i = 0; double sum = 0.0; for (Cell c : cellSet) { v[0][i] = c.myValue; sum = sum + c.myProportion; v[1][i] = sum; i++; } return v; } /** * Returns the number of cells tabulated * * @return the number of cells tabulated */ public final int getNumberOfCells() { return myCells.size(); } /** * The total count associated with the values * * @return total count associated with the values */ public final int getTotalCount() { return ((int) myStatistic.getCount());//TODO need to check //return ((int) myStatistic.getSumOfWeights()); } /** * Returns the current frequency for the provided integer * * @param x the provided integer * @return the frequency */ public final int getFrequency(int x) { myTemp.myValue = x; Cell c = myCells.get(myTemp); if (c == null) { return 0; } else { return c.myCount; } } /** * Gets the proportion of the observations that * are equal to the supplied integer * * @param x the integer * @return the proportion */ public final double getProportion(int x) { myTemp.myValue = x; Cell c = myCells.get(myTemp); if (c == null) { return 0; } else { double n = getTotalCount(); return c.myCount / n; } } /** * Interprets the elements of x[] as values * and returns an array representing the frequency * for each value * * @param x the values for the frequencies * @return the returned frequencies */ public final int[] getFrequencies(int[] x) { int[] f = new int[x.length]; for (int j = 0; j < x.length; j++) { f[j] = getFrequency(x[j]); } return f; } /** * Returns a copy of the cells in a list * ordered by the value of each cell, 0th element * is cell with 
smallest value, etc * * @return the list */ public final List<Cell> getCellList() { SortedSet<Cell> cellSet = getCells(); List<Cell> list = new ArrayList<Cell>(); for (Cell c : cellSet) { list.add(c.newInstance()); } return list; } /** * @return a DEmpirical based on the frequencies */ public DEmpiricalCDF createDEmpiricalCDF() { // form the array of parameters double[][] x = getValueCumulativeProportions(); return (new DEmpiricalCDF(x[0], x[1])); } /** * Returns a sorted set containing the cells * * @return the sorted set of cells */ protected final SortedSet<Cell> getCells() { SortedSet<Cell> cellSet = new TreeSet<Cell>(); for (Cell c : myCells.keySet()) { double n = getTotalCount(); c.myProportion = c.myCount / n; cellSet.add(c); } return (cellSet); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("Frequency Tabulation ").append(getName()).append(System.lineSeparator()); sb.append("----------------------------------------").append(System.lineSeparator()); sb.append("Number of cells = ").append(getNumberOfCells()).append(System.lineSeparator()); sb.append("Lower limit = ").append(myLowerLimit).append(System.lineSeparator()); sb.append("Upper limit = ").append(myUpperLimit).append(System.lineSeparator()); sb.append("Under flow count = ").append(myUnderFlowCount).append(System.lineSeparator()); sb.append("Over flow count = ").append(myOverFlowCount).append(System.lineSeparator()); sb.append("Total count = ").append(getTotalCount()).append(System.lineSeparator()); sb.append("----------------------------------------").append(System.lineSeparator()); sb.append("Value \t Count \t Proportion\n"); for (Cell c : getCells()) { sb.append(c); } sb.append("----------------------------------------").append(System.lineSeparator()); sb.append(System.lineSeparator()); sb.append(myStatistic.toString()); return (sb.toString()); } /** * * @return a Statistic over the observed integers */ public final Statistic getStatistic() { return 
myStatistic.newInstance(); } /** * Holds the values and their counts */ public class Cell implements Comparable<Cell> { private int myValue; private int myCount; private double myProportion = 0.0; public Cell() { this(0); } public Cell(int i) { myValue = i; myCount = 1; } public final int getValue() { return myValue; } public final int getCount() { return myCount; } public final double getProportion() { return myProportion; } @Override public final int compareTo(Cell cell) { if (myValue < cell.myValue) { return (-1); } if (myValue > cell.myValue) { return (1); } return 0; } @Override public final boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Cell other = (Cell) obj; if (this.myValue != other.myValue) { return false; } return true; } @Override public final int hashCode() { return myValue; } @Override public final String toString() { return (myValue + " \t " + myCount + " \t " + myProportion + "\n"); } public Cell newInstance() { Cell c = new Cell(); c.myValue = this.myValue; c.myCount = this.myCount; c.myProportion = this.myProportion; return c; } } }
""" """
import numpy as np

from ..shmf_bpl import log10_cumulative_shmf_bpl


def test1():
    # Dense log10-mass grid over which to evaluate the cumulative SHMF.
    lgmp_grid = np.linspace(8, 17, 5000)
    lg_nd = log10_cumulative_shmf_bpl(lgmp_grid, 0)
    # Every evaluated point must be finite (no NaN/inf anywhere on the grid).
    assert np.all(np.isfinite(lg_nd))
def transform(self, corpus: Corpus, selector: Callable[[CorpusComponent], bool] = lambda x: True) -> Corpus:
    """Annotate each selected corpus object with the classifier's predicted
    label and positive-class probability; objects excluded by ``selector``
    get both metadata fields set to None.

    :param corpus: the corpus whose objects are annotated in place
    :param selector: predicate choosing which objects to classify
    :return: the same corpus, with prediction metadata added
    """
    objs = []
    for obj in corpus.iter_objs(self.obj_type):
        if selector(obj):
            objs.append(obj)
        else:
            obj.add_meta(self.clf_attribute_name, None)
            obj.add_meta(self.clf_prob_attribute_name, None)
    # BUG FIX: if the selector excludes everything, predicting on an empty
    # matrix raises; there is nothing to classify, so return early.
    if not objs:
        return corpus
    obj_ids = [obj.id for obj in objs]
    X = corpus.get_vector_matrix(self.vector_name).get_vectors(obj_ids, self.columns)
    # NOTE(review): [:, 1] assumes a binary classifier whose second column is
    # the positive class -- confirm self.clf is always binary.
    clfs, clfs_probs = self.clf.predict(X), self.clf.predict_proba(X)[:, 1]
    for obj, clf, clf_prob in zip(objs, clfs, clfs_probs):
        obj.add_meta(self.clf_attribute_name, clf)
        obj.add_meta(self.clf_prob_attribute_name, clf_prob)
    return corpus
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// Operation shape for `AddLayerVersionPermission`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`add_layer_version_permission`](crate::client::Client::add_layer_version_permission). /// /// See [`crate::client::fluent_builders::AddLayerVersionPermission`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AddLayerVersionPermission { _private: (), } impl AddLayerVersionPermission { /// Creates a new builder-style object to manufacture [`AddLayerVersionPermissionInput`](crate::input::AddLayerVersionPermissionInput) pub fn builder() -> crate::input::add_layer_version_permission_input::Builder { crate::input::add_layer_version_permission_input::Builder::default() } /// Creates a new `AddLayerVersionPermission` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AddLayerVersionPermission { type Output = std::result::Result< crate::output::AddLayerVersionPermissionOutput, crate::error::AddLayerVersionPermissionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 201 { crate::operation_deser::parse_add_layer_version_permission_error(response) } else { crate::operation_deser::parse_add_layer_version_permission_response(response) } } } /// Operation shape for `AddPermission`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`add_permission`](crate::client::Client::add_permission). /// /// See [`crate::client::fluent_builders::AddPermission`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AddPermission { _private: (), } impl AddPermission { /// Creates a new builder-style object to manufacture [`AddPermissionInput`](crate::input::AddPermissionInput) pub fn builder() -> crate::input::add_permission_input::Builder { crate::input::add_permission_input::Builder::default() } /// Creates a new `AddPermission` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AddPermission { type Output = std::result::Result<crate::output::AddPermissionOutput, crate::error::AddPermissionError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 201 { crate::operation_deser::parse_add_permission_error(response) } else { crate::operation_deser::parse_add_permission_response(response) } } } /// Operation shape for `CreateAlias`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_alias`](crate::client::Client::create_alias). /// /// See [`crate::client::fluent_builders::CreateAlias`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateAlias { _private: (), } impl CreateAlias { /// Creates a new builder-style object to manufacture [`CreateAliasInput`](crate::input::CreateAliasInput) pub fn builder() -> crate::input::create_alias_input::Builder { crate::input::create_alias_input::Builder::default() } /// Creates a new `CreateAlias` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateAlias { type Output = std::result::Result<crate::output::CreateAliasOutput, crate::error::CreateAliasError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 201 { crate::operation_deser::parse_create_alias_error(response) } else { crate::operation_deser::parse_create_alias_response(response) } } } /// Operation shape for `CreateCodeSigningConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_code_signing_config`](crate::client::Client::create_code_signing_config). /// /// See [`crate::client::fluent_builders::CreateCodeSigningConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateCodeSigningConfig { _private: (), } impl CreateCodeSigningConfig { /// Creates a new builder-style object to manufacture [`CreateCodeSigningConfigInput`](crate::input::CreateCodeSigningConfigInput) pub fn builder() -> crate::input::create_code_signing_config_input::Builder { crate::input::create_code_signing_config_input::Builder::default() } /// Creates a new `CreateCodeSigningConfig` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateCodeSigningConfig { type Output = std::result::Result< crate::output::CreateCodeSigningConfigOutput, crate::error::CreateCodeSigningConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 201 { crate::operation_deser::parse_create_code_signing_config_error(response) } else { crate::operation_deser::parse_create_code_signing_config_response(response) } } } /// Operation shape for `CreateEventSourceMapping`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`create_event_source_mapping`](crate::client::Client::create_event_source_mapping). /// /// See [`crate::client::fluent_builders::CreateEventSourceMapping`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateEventSourceMapping { _private: (), } impl CreateEventSourceMapping { /// Creates a new builder-style object to manufacture [`CreateEventSourceMappingInput`](crate::input::CreateEventSourceMappingInput) pub fn builder() -> crate::input::create_event_source_mapping_input::Builder { crate::input::create_event_source_mapping_input::Builder::default() } /// Creates a new `CreateEventSourceMapping` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateEventSourceMapping { type Output = std::result::Result< crate::output::CreateEventSourceMappingOutput, crate::error::CreateEventSourceMappingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 202 { crate::operation_deser::parse_create_event_source_mapping_error(response) } else { crate::operation_deser::parse_create_event_source_mapping_response(response) } } } /// Operation shape for `CreateFunction`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_function`](crate::client::Client::create_function). /// /// See [`crate::client::fluent_builders::CreateFunction`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateFunction { _private: (), } impl CreateFunction { /// Creates a new builder-style object to manufacture [`CreateFunctionInput`](crate::input::CreateFunctionInput) pub fn builder() -> crate::input::create_function_input::Builder { crate::input::create_function_input::Builder::default() } /// Creates a new `CreateFunction` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateFunction { type Output = std::result::Result<crate::output::CreateFunctionOutput, crate::error::CreateFunctionError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 201 { crate::operation_deser::parse_create_function_error(response) } else { crate::operation_deser::parse_create_function_response(response) } } } /// Operation shape for `DeleteAlias`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_alias`](crate::client::Client::delete_alias). /// /// See [`crate::client::fluent_builders::DeleteAlias`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteAlias { _private: (), } impl DeleteAlias { /// Creates a new builder-style object to manufacture [`DeleteAliasInput`](crate::input::DeleteAliasInput) pub fn builder() -> crate::input::delete_alias_input::Builder { crate::input::delete_alias_input::Builder::default() } /// Creates a new `DeleteAlias` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteAlias { type Output = std::result::Result<crate::output::DeleteAliasOutput, crate::error::DeleteAliasError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_alias_error(response) } else { crate::operation_deser::parse_delete_alias_response(response) } } } /// Operation shape for `DeleteCodeSigningConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_code_signing_config`](crate::client::Client::delete_code_signing_config). /// /// See [`crate::client::fluent_builders::DeleteCodeSigningConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteCodeSigningConfig { _private: (), } impl DeleteCodeSigningConfig { /// Creates a new builder-style object to manufacture [`DeleteCodeSigningConfigInput`](crate::input::DeleteCodeSigningConfigInput) pub fn builder() -> crate::input::delete_code_signing_config_input::Builder { crate::input::delete_code_signing_config_input::Builder::default() } /// Creates a new `DeleteCodeSigningConfig` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteCodeSigningConfig { type Output = std::result::Result< crate::output::DeleteCodeSigningConfigOutput, crate::error::DeleteCodeSigningConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_code_signing_config_error(response) } else { crate::operation_deser::parse_delete_code_signing_config_response(response) } } } /// Operation shape for `DeleteEventSourceMapping`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_event_source_mapping`](crate::client::Client::delete_event_source_mapping). /// /// See [`crate::client::fluent_builders::DeleteEventSourceMapping`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteEventSourceMapping { _private: (), } impl DeleteEventSourceMapping { /// Creates a new builder-style object to manufacture [`DeleteEventSourceMappingInput`](crate::input::DeleteEventSourceMappingInput) pub fn builder() -> crate::input::delete_event_source_mapping_input::Builder { crate::input::delete_event_source_mapping_input::Builder::default() } /// Creates a new `DeleteEventSourceMapping` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteEventSourceMapping { type Output = std::result::Result< crate::output::DeleteEventSourceMappingOutput, crate::error::DeleteEventSourceMappingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 202 { crate::operation_deser::parse_delete_event_source_mapping_error(response) } else { crate::operation_deser::parse_delete_event_source_mapping_response(response) } } } /// Operation shape for `DeleteFunction`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_function`](crate::client::Client::delete_function). /// /// See [`crate::client::fluent_builders::DeleteFunction`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteFunction { _private: (), } impl DeleteFunction { /// Creates a new builder-style object to manufacture [`DeleteFunctionInput`](crate::input::DeleteFunctionInput) pub fn builder() -> crate::input::delete_function_input::Builder { crate::input::delete_function_input::Builder::default() } /// Creates a new `DeleteFunction` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteFunction { type Output = std::result::Result<crate::output::DeleteFunctionOutput, crate::error::DeleteFunctionError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_function_error(response) } else { crate::operation_deser::parse_delete_function_response(response) } } } /// Operation shape for `DeleteFunctionCodeSigningConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_function_code_signing_config`](crate::client::Client::delete_function_code_signing_config). /// /// See [`crate::client::fluent_builders::DeleteFunctionCodeSigningConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteFunctionCodeSigningConfig { _private: (), } impl DeleteFunctionCodeSigningConfig { /// Creates a new builder-style object to manufacture [`DeleteFunctionCodeSigningConfigInput`](crate::input::DeleteFunctionCodeSigningConfigInput) pub fn builder() -> crate::input::delete_function_code_signing_config_input::Builder { crate::input::delete_function_code_signing_config_input::Builder::default() } /// Creates a new `DeleteFunctionCodeSigningConfig` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteFunctionCodeSigningConfig { type Output = std::result::Result< crate::output::DeleteFunctionCodeSigningConfigOutput, crate::error::DeleteFunctionCodeSigningConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_function_code_signing_config_error(response) } else { crate::operation_deser::parse_delete_function_code_signing_config_response(response) } } } /// Operation shape for `DeleteFunctionConcurrency`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_function_concurrency`](crate::client::Client::delete_function_concurrency). /// /// See [`crate::client::fluent_builders::DeleteFunctionConcurrency`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteFunctionConcurrency { _private: (), } impl DeleteFunctionConcurrency { /// Creates a new builder-style object to manufacture [`DeleteFunctionConcurrencyInput`](crate::input::DeleteFunctionConcurrencyInput) pub fn builder() -> crate::input::delete_function_concurrency_input::Builder { crate::input::delete_function_concurrency_input::Builder::default() } /// Creates a new `DeleteFunctionConcurrency` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteFunctionConcurrency { type Output = std::result::Result< crate::output::DeleteFunctionConcurrencyOutput, crate::error::DeleteFunctionConcurrencyError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_function_concurrency_error(response) } else { crate::operation_deser::parse_delete_function_concurrency_response(response) } } } /// Operation shape for `DeleteFunctionEventInvokeConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_function_event_invoke_config`](crate::client::Client::delete_function_event_invoke_config). /// /// See [`crate::client::fluent_builders::DeleteFunctionEventInvokeConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteFunctionEventInvokeConfig { _private: (), } impl DeleteFunctionEventInvokeConfig { /// Creates a new builder-style object to manufacture [`DeleteFunctionEventInvokeConfigInput`](crate::input::DeleteFunctionEventInvokeConfigInput) pub fn builder() -> crate::input::delete_function_event_invoke_config_input::Builder { crate::input::delete_function_event_invoke_config_input::Builder::default() } /// Creates a new `DeleteFunctionEventInvokeConfig` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteFunctionEventInvokeConfig { type Output = std::result::Result< crate::output::DeleteFunctionEventInvokeConfigOutput, crate::error::DeleteFunctionEventInvokeConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_function_event_invoke_config_error(response) } else { crate::operation_deser::parse_delete_function_event_invoke_config_response(response) } } } /// Operation shape for `DeleteLayerVersion`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_layer_version`](crate::client::Client::delete_layer_version). /// /// See [`crate::client::fluent_builders::DeleteLayerVersion`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteLayerVersion { _private: (), } impl DeleteLayerVersion { /// Creates a new builder-style object to manufacture [`DeleteLayerVersionInput`](crate::input::DeleteLayerVersionInput) pub fn builder() -> crate::input::delete_layer_version_input::Builder { crate::input::delete_layer_version_input::Builder::default() } /// Creates a new `DeleteLayerVersion` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteLayerVersion { type Output = std::result::Result< crate::output::DeleteLayerVersionOutput, crate::error::DeleteLayerVersionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_layer_version_error(response) } else { crate::operation_deser::parse_delete_layer_version_response(response) } } } /// Operation shape for `DeleteProvisionedConcurrencyConfig`. 
///
/// This is usually constructed for you using the fluent builder returned by
/// [`delete_provisioned_concurrency_config`](crate::client::Client::delete_provisioned_concurrency_config).
///
/// See [`crate::client::fluent_builders::DeleteProvisionedConcurrencyConfig`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteProvisionedConcurrencyConfig {
    _private: (),
}
impl DeleteProvisionedConcurrencyConfig {
    /// Creates a new builder-style object to manufacture [`DeleteProvisionedConcurrencyConfigInput`](crate::input::DeleteProvisionedConcurrencyConfigInput)
    pub fn builder() -> crate::input::delete_provisioned_concurrency_config_input::Builder {
        crate::input::delete_provisioned_concurrency_config_input::Builder::default()
    }
    /// Creates a new `DeleteProvisionedConcurrencyConfig` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for DeleteProvisionedConcurrencyConfig {
    type Output = std::result::Result<
        crate::output::DeleteProvisionedConcurrencyConfigOutput,
        crate::error::DeleteProvisionedConcurrencyConfigError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 204 No Content is the modeled success status for this delete.
        if !response.status().is_success() && response.status().as_u16() != 204 {
            crate::operation_deser::parse_delete_provisioned_concurrency_config_error(response)
        } else {
            crate::operation_deser::parse_delete_provisioned_concurrency_config_response(response)
        }
    }
}
/// Operation shape for `GetAccountSettings`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_account_settings`](crate::client::Client::get_account_settings).
///
/// See [`crate::client::fluent_builders::GetAccountSettings`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetAccountSettings {
    _private: (),
}
impl GetAccountSettings {
    /// Creates a new builder-style object to manufacture [`GetAccountSettingsInput`](crate::input::GetAccountSettingsInput)
    pub fn builder() -> crate::input::get_account_settings_input::Builder {
        crate::input::get_account_settings_input::Builder::default()
    }
    /// Creates a new `GetAccountSettings` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetAccountSettings {
    type Output = std::result::Result<
        crate::output::GetAccountSettingsOutput,
        crate::error::GetAccountSettingsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_account_settings_error(response)
        } else {
            crate::operation_deser::parse_get_account_settings_response(response)
        }
    }
}
/// Operation shape for `GetAlias`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_alias`](crate::client::Client::get_alias).
///
/// See [`crate::client::fluent_builders::GetAlias`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetAlias {
    _private: (),
}
impl GetAlias {
    /// Creates a new builder-style object to manufacture [`GetAliasInput`](crate::input::GetAliasInput)
    pub fn builder() -> crate::input::get_alias_input::Builder {
        crate::input::get_alias_input::Builder::default()
    }
    /// Creates a new `GetAlias` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetAlias {
    type Output = std::result::Result<crate::output::GetAliasOutput, crate::error::GetAliasError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_alias_error(response)
        } else {
            crate::operation_deser::parse_get_alias_response(response)
        }
    }
}
/// Operation shape for `GetCodeSigningConfig`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_code_signing_config`](crate::client::Client::get_code_signing_config).
///
/// See [`crate::client::fluent_builders::GetCodeSigningConfig`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetCodeSigningConfig {
    _private: (),
}
impl GetCodeSigningConfig {
    /// Creates a new builder-style object to manufacture [`GetCodeSigningConfigInput`](crate::input::GetCodeSigningConfigInput)
    pub fn builder() -> crate::input::get_code_signing_config_input::Builder {
        crate::input::get_code_signing_config_input::Builder::default()
    }
    /// Creates a new `GetCodeSigningConfig` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetCodeSigningConfig {
    type Output = std::result::Result<
        crate::output::GetCodeSigningConfigOutput,
        crate::error::GetCodeSigningConfigError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_code_signing_config_error(response)
        } else {
            crate::operation_deser::parse_get_code_signing_config_response(response)
        }
    }
}
/// Operation shape for `GetEventSourceMapping`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_event_source_mapping`](crate::client::Client::get_event_source_mapping).
///
/// See [`crate::client::fluent_builders::GetEventSourceMapping`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetEventSourceMapping {
    _private: (),
}
impl GetEventSourceMapping {
    /// Creates a new builder-style object to manufacture [`GetEventSourceMappingInput`](crate::input::GetEventSourceMappingInput)
    pub fn builder() -> crate::input::get_event_source_mapping_input::Builder {
        crate::input::get_event_source_mapping_input::Builder::default()
    }
    /// Creates a new `GetEventSourceMapping` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetEventSourceMapping {
    type Output = std::result::Result<
        crate::output::GetEventSourceMappingOutput,
        crate::error::GetEventSourceMappingError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_event_source_mapping_error(response)
        } else {
            crate::operation_deser::parse_get_event_source_mapping_response(response)
        }
    }
}
/// Operation shape for `GetFunction`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_function`](crate::client::Client::get_function).
///
/// See [`crate::client::fluent_builders::GetFunction`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetFunction {
    _private: (),
}
impl GetFunction {
    /// Creates a new builder-style object to manufacture [`GetFunctionInput`](crate::input::GetFunctionInput)
    pub fn builder() -> crate::input::get_function_input::Builder {
        crate::input::get_function_input::Builder::default()
    }
    /// Creates a new `GetFunction` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetFunction {
    type Output =
        std::result::Result<crate::output::GetFunctionOutput, crate::error::GetFunctionError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_function_error(response)
        } else {
            crate::operation_deser::parse_get_function_response(response)
        }
    }
}
/// Operation shape for `GetFunctionCodeSigningConfig`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_function_code_signing_config`](crate::client::Client::get_function_code_signing_config).
///
/// See [`crate::client::fluent_builders::GetFunctionCodeSigningConfig`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetFunctionCodeSigningConfig {
    _private: (),
}
impl GetFunctionCodeSigningConfig {
    /// Creates a new builder-style object to manufacture [`GetFunctionCodeSigningConfigInput`](crate::input::GetFunctionCodeSigningConfigInput)
    pub fn builder() -> crate::input::get_function_code_signing_config_input::Builder {
        crate::input::get_function_code_signing_config_input::Builder::default()
    }
    /// Creates a new `GetFunctionCodeSigningConfig` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetFunctionCodeSigningConfig {
    type Output = std::result::Result<
        crate::output::GetFunctionCodeSigningConfigOutput,
        crate::error::GetFunctionCodeSigningConfigError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_function_code_signing_config_error(response)
        } else {
            crate::operation_deser::parse_get_function_code_signing_config_response(response)
        }
    }
}
/// Operation shape for `GetFunctionConcurrency`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_function_concurrency`](crate::client::Client::get_function_concurrency).
///
/// See [`crate::client::fluent_builders::GetFunctionConcurrency`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetFunctionConcurrency {
    _private: (),
}
impl GetFunctionConcurrency {
    /// Creates a new builder-style object to manufacture [`GetFunctionConcurrencyInput`](crate::input::GetFunctionConcurrencyInput)
    pub fn builder() -> crate::input::get_function_concurrency_input::Builder {
        crate::input::get_function_concurrency_input::Builder::default()
    }
    /// Creates a new `GetFunctionConcurrency` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetFunctionConcurrency {
    type Output = std::result::Result<
        crate::output::GetFunctionConcurrencyOutput,
        crate::error::GetFunctionConcurrencyError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_function_concurrency_error(response)
        } else {
            crate::operation_deser::parse_get_function_concurrency_response(response)
        }
    }
}
/// Operation shape for `GetFunctionConfiguration`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_function_configuration`](crate::client::Client::get_function_configuration).
///
/// See [`crate::client::fluent_builders::GetFunctionConfiguration`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetFunctionConfiguration {
    _private: (),
}
impl GetFunctionConfiguration {
    /// Creates a new builder-style object to manufacture [`GetFunctionConfigurationInput`](crate::input::GetFunctionConfigurationInput)
    pub fn builder() -> crate::input::get_function_configuration_input::Builder {
        crate::input::get_function_configuration_input::Builder::default()
    }
    /// Creates a new `GetFunctionConfiguration` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetFunctionConfiguration {
    type Output = std::result::Result<
        crate::output::GetFunctionConfigurationOutput,
        crate::error::GetFunctionConfigurationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_function_configuration_error(response)
        } else {
            crate::operation_deser::parse_get_function_configuration_response(response)
        }
    }
}
/// Operation shape for `GetFunctionEventInvokeConfig`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_function_event_invoke_config`](crate::client::Client::get_function_event_invoke_config).
///
/// See [`crate::client::fluent_builders::GetFunctionEventInvokeConfig`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetFunctionEventInvokeConfig {
    _private: (),
}
impl GetFunctionEventInvokeConfig {
    /// Creates a new builder-style object to manufacture [`GetFunctionEventInvokeConfigInput`](crate::input::GetFunctionEventInvokeConfigInput)
    pub fn builder() -> crate::input::get_function_event_invoke_config_input::Builder {
        crate::input::get_function_event_invoke_config_input::Builder::default()
    }
    /// Creates a new `GetFunctionEventInvokeConfig` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetFunctionEventInvokeConfig {
    type Output = std::result::Result<
        crate::output::GetFunctionEventInvokeConfigOutput,
        crate::error::GetFunctionEventInvokeConfigError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_function_event_invoke_config_error(response)
        } else {
            crate::operation_deser::parse_get_function_event_invoke_config_response(response)
        }
    }
}
/// Operation shape for `GetLayerVersion`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_layer_version`](crate::client::Client::get_layer_version).
///
/// See [`crate::client::fluent_builders::GetLayerVersion`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetLayerVersion {
    _private: (),
}
impl GetLayerVersion {
    /// Creates a new builder-style object to manufacture [`GetLayerVersionInput`](crate::input::GetLayerVersionInput)
    pub fn builder() -> crate::input::get_layer_version_input::Builder {
        crate::input::get_layer_version_input::Builder::default()
    }
    /// Creates a new `GetLayerVersion` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetLayerVersion {
    type Output = std::result::Result<
        crate::output::GetLayerVersionOutput,
        crate::error::GetLayerVersionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_layer_version_error(response)
        } else {
            crate::operation_deser::parse_get_layer_version_response(response)
        }
    }
}
/// Operation shape for `GetLayerVersionByArn`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_layer_version_by_arn`](crate::client::Client::get_layer_version_by_arn).
///
/// See [`crate::client::fluent_builders::GetLayerVersionByArn`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetLayerVersionByArn {
    _private: (),
}
impl GetLayerVersionByArn {
    /// Creates a new builder-style object to manufacture [`GetLayerVersionByArnInput`](crate::input::GetLayerVersionByArnInput)
    pub fn builder() -> crate::input::get_layer_version_by_arn_input::Builder {
        crate::input::get_layer_version_by_arn_input::Builder::default()
    }
    /// Creates a new `GetLayerVersionByArn` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetLayerVersionByArn {
    type Output = std::result::Result<
        crate::output::GetLayerVersionByArnOutput,
        crate::error::GetLayerVersionByArnError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_layer_version_by_arn_error(response)
        } else {
            crate::operation_deser::parse_get_layer_version_by_arn_response(response)
        }
    }
}
/// Operation shape for `GetLayerVersionPolicy`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_layer_version_policy`](crate::client::Client::get_layer_version_policy).
///
/// See [`crate::client::fluent_builders::GetLayerVersionPolicy`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetLayerVersionPolicy {
    _private: (),
}
impl GetLayerVersionPolicy {
    /// Creates a new builder-style object to manufacture [`GetLayerVersionPolicyInput`](crate::input::GetLayerVersionPolicyInput)
    pub fn builder() -> crate::input::get_layer_version_policy_input::Builder {
        crate::input::get_layer_version_policy_input::Builder::default()
    }
    /// Creates a new `GetLayerVersionPolicy` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetLayerVersionPolicy {
    type Output = std::result::Result<
        crate::output::GetLayerVersionPolicyOutput,
        crate::error::GetLayerVersionPolicyError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_layer_version_policy_error(response)
        } else {
            crate::operation_deser::parse_get_layer_version_policy_response(response)
        }
    }
}
/// Operation shape for `GetPolicy`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_policy`](crate::client::Client::get_policy).
///
/// See [`crate::client::fluent_builders::GetPolicy`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetPolicy {
    _private: (),
}
impl GetPolicy {
    /// Creates a new builder-style object to manufacture [`GetPolicyInput`](crate::input::GetPolicyInput)
    pub fn builder() -> crate::input::get_policy_input::Builder {
        crate::input::get_policy_input::Builder::default()
    }
    /// Creates a new `GetPolicy` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetPolicy {
    type Output = std::result::Result<crate::output::GetPolicyOutput, crate::error::GetPolicyError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_policy_error(response)
        } else {
            crate::operation_deser::parse_get_policy_response(response)
        }
    }
}
/// Operation shape for `GetProvisionedConcurrencyConfig`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_provisioned_concurrency_config`](crate::client::Client::get_provisioned_concurrency_config).
///
/// See [`crate::client::fluent_builders::GetProvisionedConcurrencyConfig`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetProvisionedConcurrencyConfig {
    _private: (),
}
impl GetProvisionedConcurrencyConfig {
    /// Creates a new builder-style object to manufacture [`GetProvisionedConcurrencyConfigInput`](crate::input::GetProvisionedConcurrencyConfigInput)
    pub fn builder() -> crate::input::get_provisioned_concurrency_config_input::Builder {
        crate::input::get_provisioned_concurrency_config_input::Builder::default()
    }
    /// Creates a new `GetProvisionedConcurrencyConfig` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetProvisionedConcurrencyConfig {
    type Output = std::result::Result<
        crate::output::GetProvisionedConcurrencyConfigOutput,
        crate::error::GetProvisionedConcurrencyConfigError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_provisioned_concurrency_config_error(response)
        } else {
            crate::operation_deser::parse_get_provisioned_concurrency_config_response(response)
        }
    }
}
/// Operation shape for `Invoke`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`invoke`](crate::client::Client::invoke).
///
/// See [`crate::client::fluent_builders::Invoke`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct Invoke {
    _private: (),
}
impl Invoke {
    /// Creates a new builder-style object to manufacture [`InvokeInput`](crate::input::InvokeInput)
    pub fn builder() -> crate::input::invoke_input::Builder {
        crate::input::invoke_input::Builder::default()
    }
    /// Creates a new `Invoke` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for Invoke {
    type Output = std::result::Result<crate::output::InvokeOutput, crate::error::InvokeError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_invoke_error(response)
        } else {
            crate::operation_deser::parse_invoke_response(response)
        }
    }
}
/// Operation shape for `InvokeAsync`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`invoke_async`](crate::client::Client::invoke_async).
///
/// See [`crate::client::fluent_builders::InvokeAsync`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct InvokeAsync {
    _private: (),
}
impl InvokeAsync {
    /// Creates a new builder-style object to manufacture [`InvokeAsyncInput`](crate::input::InvokeAsyncInput)
    pub fn builder() -> crate::input::invoke_async_input::Builder {
        crate::input::invoke_async_input::Builder::default()
    }
    /// Creates a new `InvokeAsync` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for InvokeAsync {
    type Output =
        std::result::Result<crate::output::InvokeAsyncOutput, crate::error::InvokeAsyncError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Note: unlike most operations here, the modeled success status for an
        // asynchronous invocation is 202 Accepted rather than 200.
        if !response.status().is_success() && response.status().as_u16() != 202 {
            crate::operation_deser::parse_invoke_async_error(response)
        } else {
            crate::operation_deser::parse_invoke_async_response(response)
        }
    }
}
/// Operation shape for `ListAliases`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_aliases`](crate::client::Client::list_aliases).
///
/// See [`crate::client::fluent_builders::ListAliases`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListAliases {
    _private: (),
}
impl ListAliases {
    /// Creates a new builder-style object to manufacture [`ListAliasesInput`](crate::input::ListAliasesInput)
    pub fn builder() -> crate::input::list_aliases_input::Builder {
        crate::input::list_aliases_input::Builder::default()
    }
    /// Creates a new `ListAliases` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListAliases {
    type Output =
        std::result::Result<crate::output::ListAliasesOutput, crate::error::ListAliasesError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_aliases_error(response)
        } else {
            crate::operation_deser::parse_list_aliases_response(response)
        }
    }
}
/// Operation shape for `ListCodeSigningConfigs`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_code_signing_configs`](crate::client::Client::list_code_signing_configs).
///
/// See [`crate::client::fluent_builders::ListCodeSigningConfigs`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListCodeSigningConfigs {
    _private: (),
}
impl ListCodeSigningConfigs {
    /// Creates a new builder-style object to manufacture [`ListCodeSigningConfigsInput`](crate::input::ListCodeSigningConfigsInput)
    pub fn builder() -> crate::input::list_code_signing_configs_input::Builder {
        crate::input::list_code_signing_configs_input::Builder::default()
    }
    /// Creates a new `ListCodeSigningConfigs` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListCodeSigningConfigs {
    type Output = std::result::Result<
        crate::output::ListCodeSigningConfigsOutput,
        crate::error::ListCodeSigningConfigsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_code_signing_configs_error(response)
        } else {
            crate::operation_deser::parse_list_code_signing_configs_response(response)
        }
    }
}
/// Operation shape for `ListEventSourceMappings`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_event_source_mappings`](crate::client::Client::list_event_source_mappings).
///
/// See [`crate::client::fluent_builders::ListEventSourceMappings`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListEventSourceMappings {
    _private: (),
}
impl ListEventSourceMappings {
    /// Creates a new builder-style object to manufacture [`ListEventSourceMappingsInput`](crate::input::ListEventSourceMappingsInput)
    pub fn builder() -> crate::input::list_event_source_mappings_input::Builder {
        crate::input::list_event_source_mappings_input::Builder::default()
    }
    /// Creates a new `ListEventSourceMappings` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListEventSourceMappings {
    type Output = std::result::Result<
        crate::output::ListEventSourceMappingsOutput,
        crate::error::ListEventSourceMappingsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_event_source_mappings_error(response)
        } else {
            crate::operation_deser::parse_list_event_source_mappings_response(response)
        }
    }
}
/// Operation shape for `ListFunctionEventInvokeConfigs`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_function_event_invoke_configs`](crate::client::Client::list_function_event_invoke_configs).
///
/// See [`crate::client::fluent_builders::ListFunctionEventInvokeConfigs`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListFunctionEventInvokeConfigs {
    _private: (),
}
impl ListFunctionEventInvokeConfigs {
    /// Creates a new builder-style object to manufacture [`ListFunctionEventInvokeConfigsInput`](crate::input::ListFunctionEventInvokeConfigsInput)
    pub fn builder() -> crate::input::list_function_event_invoke_configs_input::Builder {
        crate::input::list_function_event_invoke_configs_input::Builder::default()
    }
    /// Creates a new `ListFunctionEventInvokeConfigs` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListFunctionEventInvokeConfigs {
    type Output = std::result::Result<
        crate::output::ListFunctionEventInvokeConfigsOutput,
        crate::error::ListFunctionEventInvokeConfigsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_function_event_invoke_configs_error(response)
        } else {
            crate::operation_deser::parse_list_function_event_invoke_configs_response(response)
        }
    }
}
/// Operation shape for `ListFunctions`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_functions`](crate::client::Client::list_functions).
///
/// See [`crate::client::fluent_builders::ListFunctions`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListFunctions {
    _private: (),
}
impl ListFunctions {
    /// Creates a new builder-style object to manufacture [`ListFunctionsInput`](crate::input::ListFunctionsInput)
    pub fn builder() -> crate::input::list_functions_input::Builder {
        crate::input::list_functions_input::Builder::default()
    }
    /// Creates a new `ListFunctions` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListFunctions {
    type Output =
        std::result::Result<crate::output::ListFunctionsOutput, crate::error::ListFunctionsError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_functions_error(response)
        } else {
            crate::operation_deser::parse_list_functions_response(response)
        }
    }
}
/// Operation shape for `ListFunctionsByCodeSigningConfig`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_functions_by_code_signing_config`](crate::client::Client::list_functions_by_code_signing_config).
///
/// See [`crate::client::fluent_builders::ListFunctionsByCodeSigningConfig`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListFunctionsByCodeSigningConfig {
    _private: (),
}
impl ListFunctionsByCodeSigningConfig {
    /// Creates a new builder-style object to manufacture [`ListFunctionsByCodeSigningConfigInput`](crate::input::ListFunctionsByCodeSigningConfigInput)
    pub fn builder() -> crate::input::list_functions_by_code_signing_config_input::Builder {
        crate::input::list_functions_by_code_signing_config_input::Builder::default()
    }
    /// Creates a new `ListFunctionsByCodeSigningConfig` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListFunctionsByCodeSigningConfig {
    type Output = std::result::Result<
        crate::output::ListFunctionsByCodeSigningConfigOutput,
        crate::error::ListFunctionsByCodeSigningConfigError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_functions_by_code_signing_config_error(response)
        } else {
            crate::operation_deser::parse_list_functions_by_code_signing_config_response(response)
        }
    }
}
/// Operation shape for `ListLayers`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_layers`](crate::client::Client::list_layers).
///
/// See [`crate::client::fluent_builders::ListLayers`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListLayers {
    _private: (),
}
impl ListLayers {
    /// Creates a new builder-style object to manufacture [`ListLayersInput`](crate::input::ListLayersInput)
    pub fn builder() -> crate::input::list_layers_input::Builder {
        crate::input::list_layers_input::Builder::default()
    }
    /// Creates a new `ListLayers` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListLayers {
    type Output =
        std::result::Result<crate::output::ListLayersOutput, crate::error::ListLayersError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_layers_error(response)
        } else {
            crate::operation_deser::parse_list_layers_response(response)
        }
    }
}
/// Operation shape for `ListLayerVersions`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_layer_versions`](crate::client::Client::list_layer_versions).
///
/// See [`crate::client::fluent_builders::ListLayerVersions`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListLayerVersions {
    _private: (),
}
impl ListLayerVersions {
    /// Creates a new builder-style object to manufacture [`ListLayerVersionsInput`](crate::input::ListLayerVersionsInput)
    pub fn builder() -> crate::input::list_layer_versions_input::Builder {
        crate::input::list_layer_versions_input::Builder::default()
    }
    /// Creates a new `ListLayerVersions` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListLayerVersions {
    type Output = std::result::Result<
        crate::output::ListLayerVersionsOutput,
        crate::error::ListLayerVersionsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_layer_versions_error(response)
        } else {
            crate::operation_deser::parse_list_layer_versions_response(response)
        }
    }
}
/// Operation shape for `ListProvisionedConcurrencyConfigs`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_provisioned_concurrency_configs`](crate::client::Client::list_provisioned_concurrency_configs).
///
/// See [`crate::client::fluent_builders::ListProvisionedConcurrencyConfigs`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListProvisionedConcurrencyConfigs { _private: (), } impl ListProvisionedConcurrencyConfigs { /// Creates a new builder-style object to manufacture [`ListProvisionedConcurrencyConfigsInput`](crate::input::ListProvisionedConcurrencyConfigsInput) pub fn builder() -> crate::input::list_provisioned_concurrency_configs_input::Builder { crate::input::list_provisioned_concurrency_configs_input::Builder::default() } /// Creates a new `ListProvisionedConcurrencyConfigs` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListProvisionedConcurrencyConfigs { type Output = std::result::Result< crate::output::ListProvisionedConcurrencyConfigsOutput, crate::error::ListProvisionedConcurrencyConfigsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_provisioned_concurrency_configs_error(response) } else { crate::operation_deser::parse_list_provisioned_concurrency_configs_response(response) } } } /// Operation shape for `ListTags`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`list_tags`](crate::client::Client::list_tags). /// /// See [`crate::client::fluent_builders::ListTags`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListTags { _private: (), } impl ListTags { /// Creates a new builder-style object to manufacture [`ListTagsInput`](crate::input::ListTagsInput) pub fn builder() -> crate::input::list_tags_input::Builder { crate::input::list_tags_input::Builder::default() } /// Creates a new `ListTags` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListTags { type Output = std::result::Result<crate::output::ListTagsOutput, crate::error::ListTagsError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_tags_error(response) } else { crate::operation_deser::parse_list_tags_response(response) } } } /// Operation shape for `ListVersionsByFunction`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`list_versions_by_function`](crate::client::Client::list_versions_by_function). /// /// See [`crate::client::fluent_builders::ListVersionsByFunction`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListVersionsByFunction { _private: (), } impl ListVersionsByFunction { /// Creates a new builder-style object to manufacture [`ListVersionsByFunctionInput`](crate::input::ListVersionsByFunctionInput) pub fn builder() -> crate::input::list_versions_by_function_input::Builder { crate::input::list_versions_by_function_input::Builder::default() } /// Creates a new `ListVersionsByFunction` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListVersionsByFunction { type Output = std::result::Result< crate::output::ListVersionsByFunctionOutput, crate::error::ListVersionsByFunctionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_versions_by_function_error(response) } else { crate::operation_deser::parse_list_versions_by_function_response(response) } } } /// Operation shape for `PublishLayerVersion`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`publish_layer_version`](crate::client::Client::publish_layer_version). /// /// See [`crate::client::fluent_builders::PublishLayerVersion`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PublishLayerVersion { _private: (), } impl PublishLayerVersion { /// Creates a new builder-style object to manufacture [`PublishLayerVersionInput`](crate::input::PublishLayerVersionInput) pub fn builder() -> crate::input::publish_layer_version_input::Builder { crate::input::publish_layer_version_input::Builder::default() } /// Creates a new `PublishLayerVersion` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PublishLayerVersion { type Output = std::result::Result< crate::output::PublishLayerVersionOutput, crate::error::PublishLayerVersionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 201 { crate::operation_deser::parse_publish_layer_version_error(response) } else { crate::operation_deser::parse_publish_layer_version_response(response) } } } /// Operation shape for `PublishVersion`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`publish_version`](crate::client::Client::publish_version). /// /// See [`crate::client::fluent_builders::PublishVersion`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PublishVersion { _private: (), } impl PublishVersion { /// Creates a new builder-style object to manufacture [`PublishVersionInput`](crate::input::PublishVersionInput) pub fn builder() -> crate::input::publish_version_input::Builder { crate::input::publish_version_input::Builder::default() } /// Creates a new `PublishVersion` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PublishVersion { type Output = std::result::Result<crate::output::PublishVersionOutput, crate::error::PublishVersionError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 201 { crate::operation_deser::parse_publish_version_error(response) } else { crate::operation_deser::parse_publish_version_response(response) } } } /// Operation shape for `PutFunctionCodeSigningConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_function_code_signing_config`](crate::client::Client::put_function_code_signing_config). /// /// See [`crate::client::fluent_builders::PutFunctionCodeSigningConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutFunctionCodeSigningConfig { _private: (), } impl PutFunctionCodeSigningConfig { /// Creates a new builder-style object to manufacture [`PutFunctionCodeSigningConfigInput`](crate::input::PutFunctionCodeSigningConfigInput) pub fn builder() -> crate::input::put_function_code_signing_config_input::Builder { crate::input::put_function_code_signing_config_input::Builder::default() } /// Creates a new `PutFunctionCodeSigningConfig` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutFunctionCodeSigningConfig { type Output = std::result::Result< crate::output::PutFunctionCodeSigningConfigOutput, crate::error::PutFunctionCodeSigningConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_function_code_signing_config_error(response) } else { crate::operation_deser::parse_put_function_code_signing_config_response(response) } } } /// Operation shape for `PutFunctionConcurrency`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_function_concurrency`](crate::client::Client::put_function_concurrency). /// /// See [`crate::client::fluent_builders::PutFunctionConcurrency`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutFunctionConcurrency { _private: (), } impl PutFunctionConcurrency { /// Creates a new builder-style object to manufacture [`PutFunctionConcurrencyInput`](crate::input::PutFunctionConcurrencyInput) pub fn builder() -> crate::input::put_function_concurrency_input::Builder { crate::input::put_function_concurrency_input::Builder::default() } /// Creates a new `PutFunctionConcurrency` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutFunctionConcurrency { type Output = std::result::Result< crate::output::PutFunctionConcurrencyOutput, crate::error::PutFunctionConcurrencyError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_function_concurrency_error(response) } else { crate::operation_deser::parse_put_function_concurrency_response(response) } } } /// Operation shape for `PutFunctionEventInvokeConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_function_event_invoke_config`](crate::client::Client::put_function_event_invoke_config). /// /// See [`crate::client::fluent_builders::PutFunctionEventInvokeConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutFunctionEventInvokeConfig { _private: (), } impl PutFunctionEventInvokeConfig { /// Creates a new builder-style object to manufacture [`PutFunctionEventInvokeConfigInput`](crate::input::PutFunctionEventInvokeConfigInput) pub fn builder() -> crate::input::put_function_event_invoke_config_input::Builder { crate::input::put_function_event_invoke_config_input::Builder::default() } /// Creates a new `PutFunctionEventInvokeConfig` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutFunctionEventInvokeConfig { type Output = std::result::Result< crate::output::PutFunctionEventInvokeConfigOutput, crate::error::PutFunctionEventInvokeConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_function_event_invoke_config_error(response) } else { crate::operation_deser::parse_put_function_event_invoke_config_response(response) } } } /// Operation shape for `PutProvisionedConcurrencyConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_provisioned_concurrency_config`](crate::client::Client::put_provisioned_concurrency_config). /// /// See [`crate::client::fluent_builders::PutProvisionedConcurrencyConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutProvisionedConcurrencyConfig { _private: (), } impl PutProvisionedConcurrencyConfig { /// Creates a new builder-style object to manufacture [`PutProvisionedConcurrencyConfigInput`](crate::input::PutProvisionedConcurrencyConfigInput) pub fn builder() -> crate::input::put_provisioned_concurrency_config_input::Builder { crate::input::put_provisioned_concurrency_config_input::Builder::default() } /// Creates a new `PutProvisionedConcurrencyConfig` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutProvisionedConcurrencyConfig { type Output = std::result::Result< crate::output::PutProvisionedConcurrencyConfigOutput, crate::error::PutProvisionedConcurrencyConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 202 { crate::operation_deser::parse_put_provisioned_concurrency_config_error(response) } else { crate::operation_deser::parse_put_provisioned_concurrency_config_response(response) } } } /// Operation shape for `RemoveLayerVersionPermission`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`remove_layer_version_permission`](crate::client::Client::remove_layer_version_permission). /// /// See [`crate::client::fluent_builders::RemoveLayerVersionPermission`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct RemoveLayerVersionPermission { _private: (), } impl RemoveLayerVersionPermission { /// Creates a new builder-style object to manufacture [`RemoveLayerVersionPermissionInput`](crate::input::RemoveLayerVersionPermissionInput) pub fn builder() -> crate::input::remove_layer_version_permission_input::Builder { crate::input::remove_layer_version_permission_input::Builder::default() } /// Creates a new `RemoveLayerVersionPermission` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for RemoveLayerVersionPermission { type Output = std::result::Result< crate::output::RemoveLayerVersionPermissionOutput, crate::error::RemoveLayerVersionPermissionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_remove_layer_version_permission_error(response) } else { crate::operation_deser::parse_remove_layer_version_permission_response(response) } } } /// Operation shape for `RemovePermission`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`remove_permission`](crate::client::Client::remove_permission). /// /// See [`crate::client::fluent_builders::RemovePermission`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct RemovePermission { _private: (), } impl RemovePermission { /// Creates a new builder-style object to manufacture [`RemovePermissionInput`](crate::input::RemovePermissionInput) pub fn builder() -> crate::input::remove_permission_input::Builder { crate::input::remove_permission_input::Builder::default() } /// Creates a new `RemovePermission` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for RemovePermission { type Output = std::result::Result< crate::output::RemovePermissionOutput, crate::error::RemovePermissionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_remove_permission_error(response) } else { crate::operation_deser::parse_remove_permission_response(response) } } } /// Operation shape for `TagResource`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`tag_resource`](crate::client::Client::tag_resource). /// /// See [`crate::client::fluent_builders::TagResource`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct TagResource { _private: (), } impl TagResource { /// Creates a new builder-style object to manufacture [`TagResourceInput`](crate::input::TagResourceInput) pub fn builder() -> crate::input::tag_resource_input::Builder { crate::input::tag_resource_input::Builder::default() } /// Creates a new `TagResource` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for TagResource { type Output = std::result::Result<crate::output::TagResourceOutput, crate::error::TagResourceError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_tag_resource_error(response) } else { crate::operation_deser::parse_tag_resource_response(response) } } } /// Operation shape for `UntagResource`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`untag_resource`](crate::client::Client::untag_resource). /// /// See [`crate::client::fluent_builders::UntagResource`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UntagResource { _private: (), } impl UntagResource { /// Creates a new builder-style object to manufacture [`UntagResourceInput`](crate::input::UntagResourceInput) pub fn builder() -> crate::input::untag_resource_input::Builder { crate::input::untag_resource_input::Builder::default() } /// Creates a new `UntagResource` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UntagResource { type Output = std::result::Result<crate::output::UntagResourceOutput, crate::error::UntagResourceError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_untag_resource_error(response) } else { crate::operation_deser::parse_untag_resource_response(response) } } } /// Operation shape for `UpdateAlias`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_alias`](crate::client::Client::update_alias). /// /// See [`crate::client::fluent_builders::UpdateAlias`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateAlias { _private: (), } impl UpdateAlias { /// Creates a new builder-style object to manufacture [`UpdateAliasInput`](crate::input::UpdateAliasInput) pub fn builder() -> crate::input::update_alias_input::Builder { crate::input::update_alias_input::Builder::default() } /// Creates a new `UpdateAlias` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateAlias { type Output = std::result::Result<crate::output::UpdateAliasOutput, crate::error::UpdateAliasError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_alias_error(response) } else { crate::operation_deser::parse_update_alias_response(response) } } } /// Operation shape for `UpdateCodeSigningConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_code_signing_config`](crate::client::Client::update_code_signing_config). 
/// /// See [`crate::client::fluent_builders::UpdateCodeSigningConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateCodeSigningConfig { _private: (), } impl UpdateCodeSigningConfig { /// Creates a new builder-style object to manufacture [`UpdateCodeSigningConfigInput`](crate::input::UpdateCodeSigningConfigInput) pub fn builder() -> crate::input::update_code_signing_config_input::Builder { crate::input::update_code_signing_config_input::Builder::default() } /// Creates a new `UpdateCodeSigningConfig` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateCodeSigningConfig { type Output = std::result::Result< crate::output::UpdateCodeSigningConfigOutput, crate::error::UpdateCodeSigningConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_code_signing_config_error(response) } else { crate::operation_deser::parse_update_code_signing_config_response(response) } } } /// Operation shape for `UpdateEventSourceMapping`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_event_source_mapping`](crate::client::Client::update_event_source_mapping). /// /// See [`crate::client::fluent_builders::UpdateEventSourceMapping`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateEventSourceMapping { _private: (), } impl UpdateEventSourceMapping { /// Creates a new builder-style object to manufacture [`UpdateEventSourceMappingInput`](crate::input::UpdateEventSourceMappingInput) pub fn builder() -> crate::input::update_event_source_mapping_input::Builder { crate::input::update_event_source_mapping_input::Builder::default() } /// Creates a new `UpdateEventSourceMapping` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateEventSourceMapping { type Output = std::result::Result< crate::output::UpdateEventSourceMappingOutput, crate::error::UpdateEventSourceMappingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 202 { crate::operation_deser::parse_update_event_source_mapping_error(response) } else { crate::operation_deser::parse_update_event_source_mapping_response(response) } } } /// Operation shape for `UpdateFunctionCode`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_function_code`](crate::client::Client::update_function_code). /// /// See [`crate::client::fluent_builders::UpdateFunctionCode`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateFunctionCode { _private: (), } impl UpdateFunctionCode { /// Creates a new builder-style object to manufacture [`UpdateFunctionCodeInput`](crate::input::UpdateFunctionCodeInput) pub fn builder() -> crate::input::update_function_code_input::Builder { crate::input::update_function_code_input::Builder::default() } /// Creates a new `UpdateFunctionCode` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateFunctionCode { type Output = std::result::Result< crate::output::UpdateFunctionCodeOutput, crate::error::UpdateFunctionCodeError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_function_code_error(response) } else { crate::operation_deser::parse_update_function_code_response(response) } } } /// Operation shape for `UpdateFunctionConfiguration`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`update_function_configuration`](crate::client::Client::update_function_configuration). /// /// See [`crate::client::fluent_builders::UpdateFunctionConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateFunctionConfiguration { _private: (), } impl UpdateFunctionConfiguration { /// Creates a new builder-style object to manufacture [`UpdateFunctionConfigurationInput`](crate::input::UpdateFunctionConfigurationInput) pub fn builder() -> crate::input::update_function_configuration_input::Builder { crate::input::update_function_configuration_input::Builder::default() } /// Creates a new `UpdateFunctionConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateFunctionConfiguration { type Output = std::result::Result< crate::output::UpdateFunctionConfigurationOutput, crate::error::UpdateFunctionConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_function_configuration_error(response) } else { crate::operation_deser::parse_update_function_configuration_response(response) } } } /// Operation shape for `UpdateFunctionEventInvokeConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_function_event_invoke_config`](crate::client::Client::update_function_event_invoke_config). /// /// See [`crate::client::fluent_builders::UpdateFunctionEventInvokeConfig`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateFunctionEventInvokeConfig { _private: (), } impl UpdateFunctionEventInvokeConfig { /// Creates a new builder-style object to manufacture [`UpdateFunctionEventInvokeConfigInput`](crate::input::UpdateFunctionEventInvokeConfigInput) pub fn builder() -> crate::input::update_function_event_invoke_config_input::Builder { crate::input::update_function_event_invoke_config_input::Builder::default() } /// Creates a new `UpdateFunctionEventInvokeConfig` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateFunctionEventInvokeConfig { type Output = std::result::Result< crate::output::UpdateFunctionEventInvokeConfigOutput, crate::error::UpdateFunctionEventInvokeConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_function_event_invoke_config_error(response) } else { crate::operation_deser::parse_update_function_event_invoke_config_response(response) } } }
// NewLimiter returns a custom Limiter with Strict mode disabled.
//
// window is the size of the rate-limiting window and burst is the maximum
// number of events allowed within it; both are passed straight through to
// the Policy handed to newLimiter (units/semantics are defined by Policy).
func NewLimiter(window int, burst int) *Limiter {
	return newLimiter(Policy{
		Window: window,
		Burst:  burst,
		Strict: false,
	})
}
<filename>Example/KXModuleOrz/KXViewController.h
//
//  KXViewController.h
//  KXModuleOrz
//
//  Created by XiangqiTu on 09/23/2020.
//  Copyright (c) 2020 XiangqiTu. All rights reserved.
//

@import UIKit;

/// View controller used by the KXModuleOrz example target. It declares no
/// public API beyond what UIViewController already provides; all behavior
/// lives in the corresponding implementation file.
@interface KXViewController : UIViewController

@end
//F6164 //F6165 FUNCTION getGHGConc( ghgNumber, inYear ) float GETGHGCONC( int ghgNumber, int inYear ) { f_enter( __func__ ); assert( G_CARB != NULL && G_TANDSL != NULL && G_CONCS != NULL && G_NEWCONCS != NULL && G_STOREDVALS != NULL && G_NEWPARAMS != NULL && G_BCOC != NULL && G_METH1 != NULL && G_CAR != NULL && G_FORCE != NULL && G_JSTART != NULL && G_QADD != NULL && G_HALOF != NULL ); const int IYR = inYear - 1990 + 226; float returnValue; switch( ghgNumber ) { case 1: returnValue = G_CARB->CO2[ IYR ]; break; case 2: returnValue = G_CONCS->CH4[ IYR ]; break; case 3: returnValue = G_CONCS->CN2O[ IYR ]; break; case 4: returnValue = G_NEWCONCS->C2F6[ IYR ]; break; case 5: returnValue = G_NEWCONCS->C125[ IYR ]; break; case 6: returnValue = G_NEWCONCS->C134A[ IYR ]; break; case 7: returnValue = G_NEWCONCS->C143A[ IYR ]; break; case 8: returnValue = G_NEWCONCS->C245[ IYR ]; break; case 9: returnValue = G_NEWCONCS->CSF6[ IYR ]; break; case 10: returnValue = G_NEWCONCS->CF4[ IYR ]; break; case 11: returnValue = G_NEWCONCS->C227[ IYR ]; break; default: returnValue = std::numeric_limits<float>::max(); cerr << __func__ << " undefined gas " << ghgNumber << flush; } return( returnValue ); f_exit( __func__ ); }
/**
 * Switches between the display pane and the edit pane for the currently
 * selected media item.
 *
 * The item's concrete type (Film or TvShow) selects which pair of panes is
 * shown; the state of {@code editToggleButton} selects display vs. edit
 * mode. Exactly one of the four panes (film/tv x display/edit) is left
 * visible and enabled; the fetch button is only enabled while editing.
 * Does nothing when no item is selected.
 */
private void switchPane() {
    if (mediaList.getSelectionModel().getSelectedItem() != null) {
        // Derive the pane family from the selected item's runtime type.
        if (mediaList.getSelectionModel().getSelectedItem() instanceof Film) {
            mediaEditType = "film";
        } else if (mediaList.getSelectionModel().getSelectedItem() instanceof TvShow) {
            mediaEditType = "tv";
        } else {
            // NOTE(review): for any other item type this selects the first
            // list entry but leaves mediaEditType at its previous value, so
            // the switches below act on the OLD type for this invocation —
            // verify this fallback is intentional.
            mediaList.getSelectionModel().select(0);
        }
        if (editToggleButton.isSelected()) {
            // Entering edit mode: toggle button now offers "Save" and
            // remote fetch becomes available.
            editToggleButton.setText("Save");
            fetchButton.setDisable(false);
            // NOTE(review): neither switch has a default branch; if
            // mediaEditType holds any other value, no pane is shown.
            switch (mediaEditType) {
                case "tv":
                    // Populate the TV edit form and keep the season list it
                    // builds as the master copy.
                    masterSeasonList = setTvEditPane();
                    tvEditPane.setVisible(true);
                    tvEditPane.setDisable(false);
                    filmDisplayPane.setVisible(false);
                    tvDisplayPane.setVisible(false);
                    filmEditPane.setVisible(false);
                    filmEditPane.setDisable(true);
                    break;
                case "film":
                    setFilmEditPane();
                    filmEditPane.setVisible(true);
                    filmEditPane.setDisable(false);
                    filmDisplayPane.setVisible(false);
                    tvDisplayPane.setVisible(false);
                    tvEditPane.setVisible(false);
                    tvEditPane.setDisable(true);
                    break;
            }
        } else {
            // Leaving edit mode (back to read-only display): toggle button
            // offers "Edit" again and fetching is disabled.
            editToggleButton.setText("Edit");
            fetchButton.setDisable(true);
            switch (mediaEditType) {
                case "tv":
                    masterSeasonList = setTvDisplayPane();
                    tvDisplayPane.setVisible(true);
                    filmEditPane.setVisible(false);
                    filmEditPane.setDisable(true);
                    filmDisplayPane.setVisible(false);
                    tvEditPane.setVisible(false);
                    tvEditPane.setDisable(true);
                    break;
                case "film":
                    setFilmDisplayPane();
                    filmDisplayPane.setVisible(true);
                    filmEditPane.setVisible(false);
                    filmEditPane.setDisable(true);
                    tvDisplayPane.setVisible(false);
                    tvEditPane.setVisible(false);
                    tvEditPane.setDisable(true);
                    break;
            }
        }
    }
}
// SetBorderWidth function sets width of the Table's border and it's rows and cells func (t *Table) SetBorderWidth(value int) *Table { t.borderWidth = value for tr := range t.data { t.data[tr].SetBorderWidth(value) } return t }
<reponame>ahmedibrahimq/ng-cart
// Unit tests for ProductQtyUpdaterComponent — standard Angular TestBed
// smoke test generated by the CLI.
// NOTE(review): `async` from @angular/core/testing is deprecated in favor of
// `waitForAsync` from Angular 10 onward — confirm the project's Angular
// version before migrating.
import { async, ComponentFixture, TestBed } from '@angular/core/testing';

import { ProductQtyUpdaterComponent } from './product-qty-updater.component';

describe('ProductQtyUpdaterComponent', () => {
  let component: ProductQtyUpdaterComponent;
  let fixture: ComponentFixture<ProductQtyUpdaterComponent>;

  // Compile the component's template/styles once per test.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ProductQtyUpdaterComponent ]
    })
    .compileComponents();
  }));

  // Fresh component instance and initial change detection for every test.
  beforeEach(() => {
    fixture = TestBed.createComponent(ProductQtyUpdaterComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  // Smoke test: the component can be constructed and initialized.
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
// ProtoToInstanceTemplatePropertiesDisksInitializeParams converts a InstanceTemplatePropertiesDisksInitializeParams resource from its proto representation. func ProtoToComputeInstanceTemplatePropertiesDisksInitializeParams(p *computepb.ComputeInstanceTemplatePropertiesDisksInitializeParams) *compute.InstanceTemplatePropertiesDisksInitializeParams { if p == nil { return nil } obj := &compute.InstanceTemplatePropertiesDisksInitializeParams{ DiskName: dcl.StringOrNil(p.DiskName), DiskSizeGb: dcl.Int64OrNil(p.DiskSizeGb), DiskType: dcl.StringOrNil(p.DiskType), SourceImage: dcl.StringOrNil(p.SourceImage), SourceSnapshot: dcl.StringOrNil(p.SourceSnapshot), SourceSnapshotEncryptionKey: ProtoToComputeInstanceTemplatePropertiesDisksInitializeParamsSourceSnapshotEncryptionKey(p.GetSourceSnapshotEncryptionKey()), Description: dcl.StringOrNil(p.Description), OnUpdateAction: dcl.StringOrNil(p.OnUpdateAction), SourceImageEncryptionKey: ProtoToComputeInstanceTemplatePropertiesDisksInitializeParamsSourceImageEncryptionKey(p.GetSourceImageEncryptionKey()), } for _, r := range p.GetResourcePolicies() { obj.ResourcePolicies = append(obj.ResourcePolicies, r) } return obj }
import React from 'react';
import styled from 'styled-components';
import media from 'styled-media-query';
import { BoxProps, getBoxExpression } from './Box';
import { ScreenType, ScreenValue } from '@/constants';
import { GatsbyImage } from '@/atoms/GatsbyImage';
import { isNumber } from 'lodash';
import { IntersectionFadeIn, ReverseParallax } from '@/effects';

type NumberOrString = number | string;

// Props controlling the gaps of the 12-column grid container.
// NOTE(review): `columns` is declared but not referenced by the styles below —
// confirm whether it is still needed.
export type GridContainerProps = {
  columns?: number;
  rowGap?: NumberOrString;
  rowGapSmall?: NumberOrString;
  columnGap?: NumberOrString;
  columnGapSmall?: NumberOrString;
};

// Append "px" to bare numbers; pass strings (e.g. "2vw") through untouched.
const withUnit = (value: NumberOrString): string => (isNumber(value) ? `${value}px` : value);

// 12-column CSS grid wrapper with viewport-relative row height on default
// screens and fixed pixel rows on large screens.
// NOTE(review): the large-screen branch reads the *Small gap props
// (columnGapSmall/rowGapSmall) — confirm this naming/usage is intentional.
export const GridContainer = styled.div<GridContainerProps>`
  display: grid;
  grid-auto-rows: 3.5vw;
  grid-column-gap: ${({ columnGap = '' }) => withUnit(columnGap)};
  grid-row-gap: ${({ rowGap = '' }) => withUnit(rowGap)};
  grid-template-columns: repeat(12, 1fr);
  grid-template-rows: 3.5vw;
  ${media.greaterThan<GridContainerProps>(ScreenType.LARGE)`
    width: ${ScreenValue.LARGE}px;
    margin: 0 auto;
    grid-column-gap: ${({ columnGapSmall = '' }) => withUnit(columnGapSmall)};
    grid-row-gap: ${({ rowGapSmall = '' }) => withUnit(rowGapSmall)};
    grid-auto-rows: 40px;
    grid-template-rows: 40px;
  `};
  ${media.lessThan(ScreenType.MEDIUM)`
    grid-auto-rows: 8vw;
    grid-template-rows: 8vw;
  `};
`;

// Valid 1-based grid line indices for a 12-column grid (13 = far edge).
type GridColumnRange = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13;

// Placement of an item inside the grid, expressed as grid line numbers.
export type GridOption = {
  rowStart?: number;
  rowEnd?: number;
  columnStart?: GridColumnRange;
  columnEnd?: GridColumnRange;
};

export type GridItemProps = {
  grid?: GridOption;       // placement on default screens
  gridSmall?: GridOption;  // placement override below the SMALL breakpoint
  box?: BoxProps;          // extra box styles on default screens
  boxSmall?: BoxProps;     // extra box styles below the SMALL breakpoint
  centering?: boolean;     // center content with flexbox when true
};

// One cell of the grid; placement defaults to the full 12x11 area when a
// GridOption is given without explicit bounds.
export const GridItem = styled.div<GridItemProps>`
  display: block;
  ${({ grid }) => {
    if (!grid) return '';
    const { rowStart = 1, rowEnd = 12, columnStart = 1, columnEnd = 13 } = grid;
    return `
      grid-row: ${rowStart} / ${rowEnd};
      grid-column: ${columnStart} / ${columnEnd};
    `;
  }}
  ${({ gridSmall }) => {
    if (!gridSmall) return '';
    const { rowStart = 1, rowEnd = 12, columnStart = 1, columnEnd = 13 } = gridSmall;
    return `
      @media(max-width: ${ScreenValue.SMALL}px) {
        grid-row: ${rowStart} / ${rowEnd};
        grid-column: ${columnStart} / ${columnEnd};
      }
    `;
  }}
  ${({ box }) => (box ? getBoxExpression(box) : '')}
  @media(max-width: ${ScreenValue.SMALL}px) {
    ${({ boxSmall }) => (boxSmall ? getBoxExpression(boxSmall) : '')}
  }
  ${({ centering }) =>
    centering
      ? `
    display: flex;
    justify-content: center;
    align-items: center;
  `
      : ''}
`;

type GridImageProps = {
  src: string;
  speed?: number; // parallax coefficient forwarded to ReverseParallax
};

// Image cell: parallax-scrolling, fade-in-on-intersection image that fills
// its grid area.
export const GridImage: React.FC<GridImageProps> = ({ src, speed }) => {
  return (
    <GridImageContainer>
      <ReverseParallax zoom={1.2} fillLayout coefficient={speed}>
        <IntersectionFadeIn slideIn fillLayout>
          <GatsbyImage relativePath={src} />
        </IntersectionFadeIn>
      </ReverseParallax>
    </GridImageContainer>
  );
};

// Clips the zoomed/parallaxed image to the cell bounds.
const GridImageContainer = styled.div`
  position: relative;
  width: 100%;
  height: 100%;
  overflow: hidden;
`;
class TestCreateModels:
    """Class for model initialization test suite."""

    @pytest.mark.unit
    def test_record_model_create(self, test_recordmodel):
        """should return a record model instance."""
        assert isinstance(test_recordmodel, RAMSTKMissionPhaseRecord)

        # Verify class attributes are properly initialized.
        assert test_recordmodel.__tablename__ == "ramstk_mission_phase"
        assert test_recordmodel.revision_id == 1
        assert test_recordmodel.description == "Phase #1 for mission #1"
        assert test_recordmodel.name == "Start Up"
        assert test_recordmodel.phase_start == 0.0
        assert test_recordmodel.phase_end == 0.0

    @pytest.mark.unit
    def test_table_model_create(self, test_tablemodel):
        """should return a table model instance."""
        assert isinstance(test_tablemodel, RAMSTKMissionPhaseTable)
        assert isinstance(test_tablemodel.tree, Tree)
        assert isinstance(test_tablemodel.dao, MockDAO)
        # Verify the table's private database bookkeeping attributes.
        assert test_tablemodel._db_id_colname == "fld_phase_id"
        assert test_tablemodel._db_tablename == "ramstk_mission_phase"
        assert test_tablemodel._tag == "mission_phase"
        assert test_tablemodel._root == 0
        assert test_tablemodel._revision_id == 0
        # Verify the pubsub message subscriptions wired up at construction.
        assert pub.isSubscribed(
            test_tablemodel.do_get_attributes, "request_get_mission_phase_attributes"
        )
        assert pub.isSubscribed(
            test_tablemodel.do_get_tree, "request_get_mission_phase_tree"
        )
        assert pub.isSubscribed(test_tablemodel.do_select_all, "selected_revision")
        assert pub.isSubscribed(
            test_tablemodel.do_update, "request_update_mission_phase"
        )
        assert pub.isSubscribed(
            test_tablemodel.do_update_all, "request_update_all_mission_phase"
        )
        assert pub.isSubscribed(
            test_tablemodel.do_delete, "request_delete_mission_phase"
        )
        assert pub.isSubscribed(
            test_tablemodel.do_insert, "request_insert_mission_phase"
        )
/** * Execute the Perforce fix command with jobs, changelist and options. Log * the returned fixes. * <p> * Mark each named job as being fixed by the changelist number given with * changelist. * * @throws BuildException * the build exception * @see PerforceTask#execP4Command() */ protected void execP4Command() throws BuildException { if (isEmpty(jobs)) { throw new BuildException("No jobs specified."); } if (parseChangelist(changelist) == IChangelist.DEFAULT) { throw new BuildException( "Cannot fix jobs with the default changelist."); } if (parseChangelist(changelist) < 0) { throw new BuildException("Must enter a valid changelist."); } try { retFixList = getP4Server().fixJobs(getJobs(), parseChangelist(changelist), ((FixJobsOptions) commandOptions)); logFixes(retFixList); } catch (P4JavaException e) { throw new BuildException(e.getLocalizedMessage(), e, getLocation()); } catch (P4JavaError e) { throw new BuildException(e.getLocalizedMessage(), e, getLocation()); } catch (Throwable t) { throw new BuildException(t.getLocalizedMessage(), t, getLocation()); } }
// TYPES
// UnparsedAccountEntry
// UnparsedAccountNumberChunk

// Every well-formed account line is exactly 27 characters wide.
const hasValidLineLength = (line: string) => line.length === 27

/**
 * Splits the raw file lines into account entries.
 *
 * The input groups lines in blocks of four: three lines that encode an
 * account number followed by one separator line. Each block whose three
 * account lines all have the expected width is returned as one entry;
 * malformed blocks are skipped and reported on the console.
 *
 * NOTE(review): a trailing block with no fourth (separator) line is dropped
 * by the length guard, matching the original behaviour — confirm intended.
 */
export default function getFileSections(lines: string[]): string[][] {
  const entries: string[][] = [];
  // Step through the file four lines at a time (3 account lines + 1 separator).
  for (let i = 0; i < lines.length; i += 4) {
    if (lines.length > i + 3) {
      const accountLines = lines.slice(i, i + 3);
      if (accountLines.every(hasValidLineLength)) {
        // All three lines are the right width: keep the entry exactly once.
        entries.push(accountLines);
      } else {
        // Exclude malformed blocks and alert the user.
        console.error(`the account \n${accountLines.join('\n')} contains an invalid character`);
      }
    }
  }
  return entries;
}
import { Component } from '@angular/core'; import {AuthService} from './auth/services/auth.service'; import {Router,NavigationEnd} from '@angular/router' @Component({ selector: 'app-root', templateUrl: './app.component.html', styleUrls: ['./app.component.scss'] }) export class AppComponent { user=null; editor:boolean = false; constructor(private authService:AuthService,private router:Router){ this.authService.currentUser$.subscribe((user)=>{ if(!user){ this.user = null; }else{ this.user = user; } }) this.router.events.subscribe((event)=>{ if (event instanceof NavigationEnd) { if(event.url ==='/edit-image'){ this.editor = true; }else{ this.editor = false; } } }) } }
/** * JAVADOC Method Level Comments * * @param arg0 JAVADOC. */ @Override public void onApplicationEvent(ContextRefreshedEvent arg0) { Message<?> message = MessageBuilder.withPayload(new RegistrationEvent(applicationName, applicationName, "jms://" + myQueue)) .setHeader(Operative.DESTINATION_NAME, "server.queue") .build(); asyncChannel.send(message); }
// of this matrix. Return an invalid matrix on error. public Matrix transpose() { if (!(valid())) { return (new Matrix().invalidate()); } else { Matrix transM = new Matrix(cols, rows); for (int i = 0; i < rows; i++) for (int j = 0; j < cols; j++) transM.el[j][i] = el[i][j]; return (transM); } }
<gh_stars>0 import javax.swing.JFrame; ////////////////////////////////////////////////////////////// PaintDemo class PaintDemo { //============================================================= main public static void main(String[] args) { PaintWindow window = new PaintWindow(); window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); window.setVisible(true); }//end main }//endclass PaintDemo
Thermal desorption ambient ionization mass spectrometry for emergency toxicology. In the emergency department, it is important to rapidly identify the toxic substances that have led to acute poisoning, because different toxicants or toxins cause poisoning through different mechanisms, requiring disparate therapeutic strategies, precautions against contraindicated actions, and diverse directions of clinical course monitoring and prediction of prognosis. Ambient ionization mass spectrometry, a state-of-the-art technology, has proven to be a fast, accurate, and user-friendly tool for rapidly identifying toxicants such as residual pesticides on fruits and vegetables. In view of this, a research program has been initiated to develop an analytical platform that explores the application of such a cutting-edge technology in a novel direction: the rapid identification of the toxic substances that might have caused acute poisoning in patients who visit the emergency department and require an accurate diagnosis for correct clinical decision-making and the corresponding data-guided management. This review includes (i) a narrative account of the breakthrough in emergency toxicology brought about by the advent of ambient ionization mass spectrometry and (ii) a thorough discussion of the clinical implications and technical limitations of such a promising innovation for promoting toxicological tests from the tier-two level to the tier-one level.
/**
 * Functional tests for GMU total-order distribution: verifies that the
 * GMU-specific TotalOrderManager is installed and that concurrent
 * transactions with read-read, read-write and write-write overlaps are
 * ordered (blocked/aborted) as the total-order protocol requires.
 *
 * @author Pedro Ruivo
 * @since 5.2
 */
@Test(groups = "functional", testName = "tx.gmu.totalorder.TotalOrderDistSimpleTest")
public class TotalOrderDistSimpleTest extends DistSimpleTest {

   /** The cache must be wired with the GMU flavour of the total order manager. */
   public void testTotalOrderManager() {
      TotalOrderManager totalOrderManager = TestingUtil.extractComponent(cache(0), TotalOrderManager.class);
      Assert.assertTrue(totalOrderManager instanceof GMUTotalOrderManager, "Wrong total order manager");
   }

   /**
    * Two transactions that only share *read* keys must be deliverable
    * concurrently: both prepares arrive while blocked and both commit.
    */
   public void testReadReadDependency() throws Exception {
      final Object key01 = newKey(0);
      final Object key02 = newKey(0);
      final Object key11 = newKey(1);
      final Object key12 = newKey(1);

      //init the keys
      tm(0).begin();
      cache(0).put(key01, VALUE_1);
      cache(0).put(key02, VALUE_1);
      cache(0).put(key11, VALUE_1);
      cache(0).put(key12, VALUE_1);
      tm(0).commit();

      try {
         final CommandBlocker blocker = addCommandBlocker(cache(0));
         blocker.reset();

         tm(0).begin();
         //RS={01,02,12}, WS={01}
         Assert.assertEquals(cache(0).get(key01), VALUE_1);
         Assert.assertEquals(cache(0).get(key02), VALUE_1);
         Assert.assertEquals(cache(0).get(key12), VALUE_1);
         final GlobalTransaction gtx1 = globalTransaction(0);
         final Transaction tx1 = tm(0).suspend();

         tm(0).begin();
         //RS={02,11,12}, WS={11}
         Assert.assertEquals(cache(0).get(key02), VALUE_1);
         Assert.assertEquals(cache(0).get(key11), VALUE_1);
         Assert.assertEquals(cache(0).get(key12), VALUE_1);
         final GlobalTransaction gtx2 = globalTransaction(0);
         final Transaction tx2 = tm(0).suspend();

         // Hold both prepares in the interceptor chain until released below.
         blocker.block(gtx1);
         blocker.block(gtx2);

         final Future<Boolean> ftx1 = fork(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
               tm(0).resume(tx1);
               cache(0).put(key01, VALUE_2);
               tm(0).commit();
               return Boolean.TRUE;
            }
         });

         final Future<Boolean> ftx2 = fork(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
               tm(0).resume(tx2);
               cache(0).put(key11, VALUE_2);
               tm(0).commit();
               return Boolean.TRUE;
            }
         });

         // Read-read overlap only: both prepares must arrive concurrently.
         Assert.assertTrue(blocker.await(gtx1, 10000), gtx1 + " was never received!");
         Assert.assertTrue(blocker.await(gtx2, 10000), gtx2 + " was never received!");

         blocker.unblock(gtx1);
         blocker.unblock(gtx2);

         Assert.assertTrue(ftx1.get());
         Assert.assertTrue(ftx2.get());

         assertCachesValue(0, key01, VALUE_2);
         assertCachesValue(0, key02, VALUE_1);
         assertCachesValue(0, key11, VALUE_2);
         assertCachesValue(0, key12, VALUE_1);

         assertNoTransactions();
         assertNoLocks();
      } finally {
         removeCommandBlocked(cache(0));
      }
   }

   /**
    * tx2 writes a key that tx1 has read: tx2's prepare must be held back
    * until tx1 is delivered, but both transactions still commit.
    */
   public void testReadWriteDependency() throws Exception {
      final Object key01 = newKey(0);
      final Object key02 = newKey(0);
      final Object key11 = newKey(1);
      final Object key12 = newKey(1);

      //init the keys
      tm(0).begin();
      cache(0).put(key01, VALUE_1);
      cache(0).put(key02, VALUE_1);
      cache(0).put(key11, VALUE_1);
      cache(0).put(key12, VALUE_1);
      tm(0).commit();

      try {
         final CommandBlocker blocker = addCommandBlocker(cache(0));
         blocker.reset();

         tm(0).begin();
         //RS={01,11,02,12}, WS={01}
         Assert.assertEquals(cache(0).get(key01), VALUE_1);
         Assert.assertEquals(cache(0).get(key11), VALUE_1);
         Assert.assertEquals(cache(0).get(key02), VALUE_1);
         Assert.assertEquals(cache(0).get(key12), VALUE_1);
         final GlobalTransaction gtx1 = globalTransaction(0);
         final Transaction tx1 = tm(0).suspend();

         tm(0).begin();
         //RS={02,11,12}, WS={11}
         //writes on a key read by the other
         Assert.assertEquals(cache(0).get(key02), VALUE_1);
         Assert.assertEquals(cache(0).get(key11), VALUE_1);
         Assert.assertEquals(cache(0).get(key12), VALUE_1);
         final GlobalTransaction gtx2 = globalTransaction(0);
         final Transaction tx2 = tm(0).suspend();

         blocker.block(gtx1);
         blocker.block(gtx2);

         final Future<Boolean> ftx1 = fork(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
               tm(0).resume(tx1);
               cache(0).put(key01, VALUE_2);
               tm(0).commit();
               return Boolean.TRUE;
            }
         });

         Assert.assertTrue(blocker.await(gtx1, 10000), gtx1 + " was never received!");

         final Future<Boolean> ftx2 = fork(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
               tm(0).resume(tx2);
               cache(0).put(key11, VALUE_2);
               tm(0).commit();
               return Boolean.TRUE;
            }
         });

         // Conflicting prepare must NOT be delivered while gtx1 is pending.
         Assert.assertFalse(blocker.await(gtx2, 5000), gtx2 + " was received and it should block");

         blocker.unblock(gtx1);
         blocker.unblock(gtx2);

         Assert.assertTrue(ftx1.get());
         Assert.assertTrue(ftx2.get());

         assertCachesValue(0, key01, VALUE_2);
         assertCachesValue(0, key02, VALUE_1);
         assertCachesValue(0, key11, VALUE_2);
         assertCachesValue(0, key12, VALUE_1);

         assertNoTransactions();
         assertNoLocks();
      } finally {
         removeCommandBlocked(cache(0));
      }
   }

   /**
    * Both transactions write the same key: the second prepare is blocked and
    * the second transaction must abort once the first commits.
    */
   public void testWriteWriteDependency() throws Exception {
      final Object key01 = newKey(0);
      final Object key02 = newKey(0);
      final Object key11 = newKey(1);
      final Object key12 = newKey(1);

      //init the keys
      tm(0).begin();
      cache(0).put(key01, VALUE_1);
      cache(0).put(key02, VALUE_1);
      cache(0).put(key11, VALUE_1);
      cache(0).put(key12, VALUE_1);
      tm(0).commit();

      try {
         final CommandBlocker blocker = addCommandBlocker(cache(0));
         blocker.reset();

         tm(0).begin();
         //RS={02,11,12}, WS={01}
         Assert.assertEquals(cache(0).get(key02), VALUE_1);
         Assert.assertEquals(cache(0).get(key11), VALUE_1);
         Assert.assertEquals(cache(0).get(key12), VALUE_1);
         final GlobalTransaction gtx1 = globalTransaction(0);
         final Transaction tx1 = tm(0).suspend();

         tm(0).begin();
         //RS={02,11,12}, WS={01}
         //writes on a key write by the other
         Assert.assertEquals(cache(0).get(key02), VALUE_1);
         Assert.assertEquals(cache(0).get(key11), VALUE_1);
         Assert.assertEquals(cache(0).get(key12), VALUE_1);
         final GlobalTransaction gtx2 = globalTransaction(0);
         final Transaction tx2 = tm(0).suspend();

         blocker.block(gtx1);
         blocker.block(gtx2);

         final Future<Boolean> ftx1 = fork(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
               tm(0).resume(tx1);
               cache(0).put(key01, VALUE_2);
               tm(0).commit();
               return Boolean.TRUE;
            }
         });

         Assert.assertTrue(blocker.await(gtx1, 10000), gtx1 + " was never received!");

         final Future<Boolean> ftx2 = fork(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
               tm(0).resume(tx2);
               cache(0).put(key01, VALUE_2);
               tm(0).commit();
               return Boolean.TRUE;
            }
         });

         Assert.assertFalse(blocker.await(gtx2, 5000), gtx2 + " was received and it should block");

         blocker.unblock(gtx1);
         blocker.unblock(gtx2);

         Assert.assertTrue(ftx1.get());
         // tx2 wrote the same key after tx1 committed: it must abort.
         try {
            ftx2.get();
            Assert.fail("Transaction should fail!");
         } catch (Exception e) {
            //expected
         }

         assertCachesValue(0, key01, VALUE_2);
         assertCachesValue(0, key02, VALUE_1);
         assertCachesValue(0, key11, VALUE_1);
         assertCachesValue(0, key12, VALUE_1);

         assertNoTransactions();
         assertNoLocks();
      } finally {
         removeCommandBlocked(cache(0));
      }
   }

   /** Switch the cache configuration to the total-order protocol. */
   @Override
   protected void decorate(ConfigurationBuilder builder) {
      super.decorate(builder);
      builder.transaction().transactionProtocol(TransactionProtocol.TOTAL_ORDER)
            .recovery().disable();
   }

   /** Eventually no total-order lock may remain acquired on any cache. */
   protected final void assertNoLocks() {
      eventually(new Condition() {
         @Override
         public boolean isSatisfied() throws Exception {
            for (Cache cache : caches()) {
               if (TestingUtil.extractComponent(cache, TotalOrderManager.class).hasAnyLockAcquired()) {
                  return false;
               }
            }
            return true;
         }
      });
   }

   // Installs a CommandBlocker at the head of the interceptor chain so tests
   // can hold back prepare delivery per transaction.
   private CommandBlocker addCommandBlocker(Cache cache) {
      InterceptorChain chain = TestingUtil.extractComponent(cache, InterceptorChain.class);
      CommandBlocker blocker = new CommandBlocker();
      chain.addInterceptor(blocker, 0);
      return blocker;
   }

   // Removes the test interceptor again (always runs in the finally blocks).
   private void removeCommandBlocked(Cache cache) {
      InterceptorChain chain = TestingUtil.extractComponent(cache, InterceptorChain.class);
      chain.removeInterceptor(CommandBlocker.class);
   }
}
    def _register_factor_group(
        self, factor_group: groups.FactorGroup, name: Optional[str] = None
    ) -> None:
        """Register a factor group (and each factor it contains) with this graph.

        Records, for the group and for every individual factor, the starting
        offsets into the flat message and log-potential arrays, and updates the
        graph-wide totals. Optionally registers the group under ``name``.

        Args:
            factor_group: the factor group to register.
            name: optional unique name for later lookup.

        Raises:
            ValueError: if ``name`` is already taken, or if some factor's
                variables are already covered by a previously registered factor.
        """
        if name in self._named_factor_groups:
            raise ValueError(
                f"A factor group with the name {name} already exists. Please choose a different name!"
            )

        # The group's messages/potentials start where the current totals end.
        self._factor_group_to_msgs_starts[factor_group] = self._total_factor_num_states
        self._factor_group_to_potentials_starts[
            factor_group
        ] = self._total_factor_num_configs
        # Prefix sums of per-factor state counts; leading 0 makes entry vv the
        # message offset of factor vv relative to the group's start.
        factor_num_states_cumsum = np.insert(
            factor_group.factor_num_states.cumsum(), 0, 0
        )
        factor_group_num_configs = 0
        for vv, variables in enumerate(factor_group._variables_to_factors):
            if variables in self._variables_to_factors:
                raise ValueError(
                    f"A factor involving variables {variables} already exists. Please merge the corresponding factors."
                )

            factor = factor_group._variables_to_factors[variables]
            self._variables_to_factors[variables] = factor
            self._factor_to_msgs_starts[factor] = (
                self._factor_group_to_msgs_starts[factor_group]
                + factor_num_states_cumsum[vv]
            )
            # NOTE(review): potentials offset assumes every factor in the
            # group has the same number of configurations — confirm.
            self._factor_to_potentials_starts[factor] = (
                self._factor_group_to_potentials_starts[factor_group]
                + vv * factor.log_potentials.shape[0]
            )
            factor_group_num_configs += factor.log_potentials.shape[0]

        self._total_factor_num_states += factor_num_states_cumsum[-1]
        self._total_factor_num_configs += factor_group_num_configs
        if name is not None:
            self._named_factor_groups[name] = factor_group
def _wait_util(self, expect, func, *args, **kwargs): left = self._wait_retry while left > 0: if func(*args, **kwargs) == expect: return else: left = left - 1 sleep(self._interval) raise Exception("Timeout waiting for state change to %s" % expect)
package com.arkaces.aces_listener_ethereum.ethereum_rpc;

import com.arkaces.aces_listener_ethereum.RpcRequest;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * Spring-managed factory for building Ethereum JSON-RPC request objects.
 */
@Service
public class RpcRequestFactory {

    /**
     * Builds an {@link RpcRequest} carrying the given method name and
     * positional parameters.
     *
     * @param method the JSON-RPC method name.
     * @param params the positional parameters for the call.
     * @return a populated request ready to be sent.
     */
    public RpcRequest create(String method, List<Object> params) {
        final RpcRequest rpcRequest = new RpcRequest();
        rpcRequest.setMethod(method);
        rpcRequest.setParams(params);
        return rpcRequest;
    }
}
/* Author haleyk10198 */
/* Name: haleyk10198 */
/* CF handle: haleyk100198*/
/* FOR ACM-ICPC WF*/
#include <bits/stdc++.h>

using namespace std;

using ll = long long;
using vi = vector<int>;
using vvi = vector<vi>;
using pii = pair<int, int>;

#define pb push_back

constexpr auto MOD = 1000000007LL;
constexpr auto LINF = (1LL<<60);
constexpr auto INF = 2147483647LL;
constexpr auto PI = 3.1415926535897932384626433;
constexpr auto EPS = 1E-9;  // tolerance used by all floating-point comparisons below

// Stream helpers: read/write pairs and vectors with whitespace separation.
template<typename T1, typename T2>
ostream& operator<<(ostream& out, const pair<T1, T2> p){
    out << p.first << ' ' << p.second;
    return out;
}

template <typename T1, typename T2>
istream& operator>>(istream& in, pair<T1, T2> &p){
    in >> p.first >> p.second;
    return in;
}

template <typename T>
istream& operator>>(istream &in, vector<T> &v){
    for(auto &x: v)
        in >> x;
    return in;
}

template <typename T>
ostream& operator<<(ostream &out, vector<T> v){
    for(int i = 0; i < v.size(); i++)
        out << v[i] << (i+1 == v.size()? '\n': ' ');
    out.flush();
    return out;
}

#if __cplusplus < 201703L
#define mp make_pair
#endif

using point_t = double;

// 2D point with EPS-tolerant ordering and equality.
struct point{
    point_t x, y;
    point(){ x = y = 0; }
    point(point_t x, point_t y):x(x), y(y){ }
    // Lexicographic order (x first), treating values within EPS as equal.
    inline bool operator < (point rhs) const{
        if(fabs(x-rhs.x) > EPS)
            return x < rhs.x;
        return y < rhs.y;
    }
    inline bool operator == (point rhs) const{
        return (fabs(x-rhs.x) < EPS && fabs(y-rhs.y) < EPS);
    }
};

istream& operator>>(istream &in, point &pt){
    in >> pt.x >> pt.y;
    return in;
}

// 2D vector; constructible from components or as b - a from two points.
struct vec{
    point_t x, y;
    vec(point_t x, point_t y): x(x), y(y){ }
    vec(point a, point b){ *this = vec(b.x-a.x, b.y-a.y); }
    inline vec operator* (point_t r) const{ return vec(x*r, y*r); }
    inline vec operator+ (point x) const{ return vec(this->x+x.x, this->y+x.y); }
};

// Translate a point by a vector.
inline point operator+ (point p, vec v){
    p.x += v.x;
    p.y += v.y;
    return p;
}

// Line in implicit form a*x + b*y + c = 0, normalised so that b is 0
// (vertical line) or 1 (everything else).
struct line{
    point_t a, b, c;
    line(point p1, point p2){
        if(fabs(p1.x-p2.x) < EPS){
            // Vertical line: x = p1.x.
            a = 1.0;
            b = 0.0;
            c = -p1.x;
            return;
        }
        else{
            a = -1.0*(p1.y-p2.y)/(p1.x-p2.x);
            b = 1.0;
            c = -(a*p1.x)-p1.y;
            return;
        }
    }
};
// Euclidean distance between two points.
inline point_t dist(point p1, point p2){ return hypot(p1.x-p2.x, p1.y-p2.y); }

// Dot product of two vectors.
inline point_t dot(vec a, vec b){ return a.x*b.x + a.y*b.y; }

// Squared length of a vector.
inline point_t norm_sq(vec v){ return v.x*v.x+v.y*v.y; }

// Two implicit-form lines are parallel when their a and b coefficients match.
inline bool areParallel(line l1, line l2){
    return (fabs(l1.a-l2.a) < EPS && fabs(l1.b-l2.b) < EPS);
}

// Distance from p to the infinite line through a and b; the closest point on
// the line is written to ret.
point_t distToLine(point p, point a, point b, point &ret){
    vec ap = vec(a, p);
    vec ab = vec(a, b);
    point_t u = dot(ap, ab)/norm_sq(ab);
    ret = a+ab*u;
    return dist(p, ret);
}

point_t distToLineSegment(point p, point a, point b, point &ret){
    //Similar to distToLine, but won't extend the line further
    vec ap = vec(a, p);
    vec ab = vec(a, b);
    point_t u = dot(ap, ab)/norm_sq(ab);
    if(u < 0.0){
        // Projection falls before a: a is the closest point.
        ret = a;
        return dist(p, a);
    }
    else if(u > 1.0){
        // Projection falls past b: b is the closest point.
        ret = b;
        return dist(p, b);
    }
    return distToLine(p, a, b, ret);
}

// Same line: parallel with matching constant term.
inline bool areSame(line l1, line l2){
    return areParallel(l1, l2) && (fabs(l1.c-l2.c) < EPS);
}

// Intersect two lines; returns false when parallel, otherwise writes the
// intersection to ret (verified against l1's equation via assert).
bool areIntersect(line l1, line l2, point &ret){
    if(areParallel(l1, l2))
        return false;
    ret.x = (l2.b*l1.c - l1.b*l2.c) / (l2.a*l1.b - l1.a*l2.b);
    if(fabs(l1.b) > EPS)
        ret.y = -(l1.a*ret.x + l1.c);
    else
        ret.y = -(l2.a*ret.x + l2.c);
    assert(fabs(ret.x*l1.a + ret.y*l1.b + l1.c) < EPS);
    return true;
}

// Angle a-o-b (at vertex o), in radians.
point_t angle(point a, point o, point b){
    vec oa = vec(o, a);
    vec ob = vec(o, b);
    return acos(dot(oa, ob)/sqrt(norm_sq(oa)*norm_sq(ob)));
}

// Rotate p counter-clockwise about the origin by theta DEGREES.
inline point rotate(point p, point_t theta){
    point_t rad = theta*PI/180.0;
    return point(p.x*cos(rad)-p.y*sin(rad), p.x*sin(rad)+p.y*cos(rad));
}

// 2D cross product (z component).
inline point_t cross(vec a, vec b){ return a.x*b.y - a.y*b.x; }

inline point_t ccw(point p, point q, point r){
    // >0 -> counter-cw == 0 -> collinear <0 -> cw
    return cross(vec(p, q), vec(p, r));
}

// Signed polygon area via the shoelace formula.
point_t area(const vector<point> &P){
    //P[0] == P[-1]
    //points must be sorted
    point_t ret = 0, x1, y1, x2, y2;
    for(int i = 0; i+1 < P.size(); i++){
        x1 = P[i].x, x2 = P[i+1].x;
        y1 = P[i].y, y2 = P[i+1].y;
        ret += (x1*y2 - x2*y1);
    }
    return ret/2;
}

// All turns must have the same orientation as the first one.
// NOTE(review): ccw(...) returns point_t and is compared against a bool,
// relying on implicit conversion (collinear counts as "not left") — verify.
bool isConvex(const vector<point> &P){
    //wont work if there are consec st lines
    int sz = P.size();
    if(sz <= 3)
        return false;
    bool isLeft = (ccw(P[0], P[1], P[2]) > 0);
    for(int i = 1; i+1 < sz; i++)
        if(ccw(P[i], P[i+1], (i+2 == sz? P[1]: P[i+2])) != isLeft)
            return false;
    return true;
}

// Winding-angle point-in-polygon test: angles sum to +/-2*PI when inside.
bool inPolygon(point pt, const vector<point> &P){
    if(P.size() == 0)
        return false;
    point_t sum = 0;
    for(int i = 0; i+1 < P.size(); i++)
        if(ccw(pt, P[i], P[i+1]) > 0)
            sum += angle(P[i], pt, P[i+1]);
        else
            sum -= angle(P[i], pt, P[i+1]);
    if(fabs(sum-2*PI) < EPS)
        return true;
    if(fabs(sum+2*PI) < EPS)
        return true;
    return false;
}

// Sort points by angle around the bottom-most point a[0] (Graham-scan order),
// breaking ties by distance to a[0].
void sortList(vector<point> &a){
    sort(a.begin(), a.end(), [](point x, point y){return x.y < y.y;});
    sort(a.begin()+1, a.end(), [&a](point x, point y){
        point_t ret = ccw(x, a[0], y);
        if(fabs(ret) < EPS)
            return dist(x, a[0]) < dist(y, a[0]);
        else
            return ret < 0;
    });
    return;
}

// Graham scan: pops points that would make a clockwise turn.
vector<point> convexHull(vector<point> &a){
    vector<point> ret;
    sortList(a);
    for(point x: a){
        while(ret.size() >= 2 && ccw(ret[ret.size()-2], ret[ret.size()-1], x) < -EPS)
            ret.pop_back();
        ret.pb(x);
    }
    return ret;
}

// Divide-and-conquer closest-pair helper; gmin holds the best distance found.
// NOTE(review): the merge-pick condition `(i < mid && (a[i].y < a[j].y || j < r)?`
// looks suspicious — `j < r` is always true while j is in range, so the left
// half is drained first; the intended guard was probably `j >= r`. Verify
// before relying on this routine (main() below does not call it).
void closestHelp(point *a, int l, int r, point_t &gmin){
    if(l+1 == r)
        return;
    int mid = l+r>>1;
    closestHelp(a, l, mid, gmin);
    closestHelp(a, mid, r, gmin);
    vector<point> ord;
    int sweep = 0;
    for(int i = l, j = mid; i < mid && j < r; ){
        int id = (i < mid && (a[i].y < a[j].y || j < r)? i++: j++);
        while(sweep < ord.size() && a[id].y-ord[sweep].y >= gmin)
            sweep++;
        for(int i = sweep; i < ord.size(); i++)
            gmin = min(gmin, dist(ord[i], a[id]));
        ord.pb(a[id]);
    }
    for(int i = 0; i < ord.size(); i++)
        a[i] = ord[i];
    return;
}

point_t closestPair(point *a, int n){
    sort(a, a+n, [](point x, point y){return x.x < y.x;});
    point_t res = LINF;
    closestHelp(a, 0, n, res);
    return res;
}

// The two quadrilaterals read from input (closed by repeating the first point).
vector<point> a, b;

// Reads two quadrilaterals and prints "Yes" when they touch or overlap:
// shared vertex, vertex containment, or a proper edge-segment intersection.
int main(){
#ifdef LOCAL
    freopen("../input.txt","r",stdin);
//    freopen("output.txt","w",stdout);
    freopen("../debug.txt", "w", stderr);
#endif
    ios_base::sync_with_stdio(false);
    a.resize(4), b.resize(4);
    cin >> a >> b;
    a.pb(a.front());
    b.pb(b.front());
    // Any shared corner means they touch.
    for(int i = 0; i < 4; i++)
        for(int j = 0; j < 4; j++){
            if(a[i] == b[j]){
                cout << "Yes" << endl;
                return 0;
            }
        }
    // Any corner of one inside the other means they overlap.
    for(int i = 0; i < 4; i++){
        if(inPolygon(a[i], b) || inPolygon(b[i], a)){
            cout << "Yes" << endl;
            return 0;
        }
    }
    // Otherwise check every edge pair for a segment-segment intersection:
    // the line intersection must lie on BOTH segments.
    for(int i = 0; i < 4; i++)
        for(int j = 0; j < 4; j++){
            point at;
            if(areIntersect(line(a[i], a[(i+1)%4]), line(b[j], b[(j+1)%4]), at)){
                point to;
                distToLineSegment(at, a[i], a[(i+1)%4], to);
                if(at == to);
                else
                    break;
                distToLineSegment(at, b[j], b[(j+1)%4], to);
                if(at == to){
                    cout << "Yes" << endl;
                    return 0;
                }
            }
        }
    cout << "No" << endl;
    return 0;
}
<filename>persistence/workload_config.go<gh_stars>0 package persistence import ( "bytes" "encoding/json" "errors" "fmt" "github.com/boltdb/bolt" "github.com/golang/glog" ) // workload variable configuration table name const WORKLOAD_CONFIG = "workload_config" type WorkloadConfig struct { WorkloadURL string `json:"workload_url"` Org string `json:"organization"` VersionExpression string `json:"workload_version"` // This is a version range Attributes []Attribute `json:"attributes"` } func (w WorkloadConfig) String() string { return fmt.Sprintf("WorkloadURL: %v, "+ "Org: %v, "+ "VersionExpression: %v, "+ "Attributes: %v", w.WorkloadURL, w.Org, w.VersionExpression, w.Attributes) } func (w *WorkloadConfig) GetKey() string { catNull := func(str string) string { return fmt.Sprintf("%s\x00", str) } var sb bytes.Buffer sb.WriteString(catNull(w.WorkloadURL)) sb.WriteString(catNull(w.Org)) sb.WriteString(w.VersionExpression) return sb.String() } // create a new workload config object and save it to db. 
// NewWorkloadConfig validates the inputs, rejects duplicates, and persists a
// new workload config record to the bolt database. Returns the new record,
// or an error when a field is empty, the lookup fails, a matching record
// already exists, or the write transaction fails.
func NewWorkloadConfig(db *bolt.DB, workloadURL string, org string, version string, variables []Attribute) (*WorkloadConfig, error) {

	if workloadURL == "" || org == "" || version == "" {
		return nil, errors.New("WorkloadConfig, workload URL, organization, or version is empty, cannot persist")
	}

	// Duplicate (URL, org, version) triples are not allowed.
	if wcfg, err := FindWorkloadConfig(db, workloadURL, org, version); err != nil {
		return nil, err
	} else if wcfg != nil {
		return nil, fmt.Errorf("Not expecting any records with WorkloadURL %v, org %v, and version %v, found %v", workloadURL, org, version, wcfg)
	}

	new_cfg := &WorkloadConfig{
		WorkloadURL:       workloadURL,
		Org:               org,
		VersionExpression: version,
		Attributes:        variables,
	}

	// Write the JSON-serialized record under its composite key inside a
	// single bolt update transaction; the transaction error (if any) is
	// returned alongside the new record.
	return new_cfg, db.Update(func(tx *bolt.Tx) error {

		if b, err := tx.CreateBucketIfNotExists([]byte(WORKLOAD_CONFIG)); err != nil {
			return err
		} else if bytes, err := json.Marshal(new_cfg); err != nil {
			return fmt.Errorf("Unable to marshal new record: %v", err)
		} else if err := b.Put([]byte(new_cfg.GetKey()), []byte(bytes)); err != nil {
			return fmt.Errorf("Unable to persist workload config: %v", err)
		} else {
			glog.Infof("serialized to db record: %v", string(bytes))
		}

		// success, close tx
		return nil
	})
}
type WorkloadConfigOnly struct { WorkloadURL string `json:"workload_url"` Org string `json:"organization"` VersionExpression string `json:"workload_version"` // This is a version range Attributes []map[string]interface{} `json:"attributes"` } func hydrateWorkloadConfig(cfgOnly *WorkloadConfigOnly) (*WorkloadConfig, error) { if cfgOnly == nil { return nil, nil } attrList := make([]Attribute, 0, 10) for _, intf := range cfgOnly.Attributes { if sa, err := json.Marshal(intf); err != nil { glog.Errorf("Unable to serialize workload config attribute %v, error %v", intf, err) return nil, err } else if attr, err := HydrateConcreteAttribute(sa); err != nil { glog.Errorf("Unable to hydrate workload config attribute %s, error %v", sa, err) return nil, err } else { attrList = append(attrList, attr) } } return &WorkloadConfig{ WorkloadURL: cfgOnly.WorkloadURL, Org: cfgOnly.Org, VersionExpression: cfgOnly.VersionExpression, Attributes: attrList, }, nil } // find the workload config variables in the db func FindWorkloadConfig(db *bolt.DB, url string, org string, version string) (*WorkloadConfig, error) { var cfg *WorkloadConfig // fetch workload config objects readErr := db.View(func(tx *bolt.Tx) error { var cfgOnly *WorkloadConfigOnly if b := tx.Bucket([]byte(WORKLOAD_CONFIG)); b != nil { err := b.ForEach(func(k, v []byte) error { var w WorkloadConfigOnly if err := json.Unmarshal(v, &w); err != nil { glog.Errorf("Unable to deserialize workload config db record %v, error %v", string(v), err) return err } else if w.WorkloadURL == url && w.Org == org && w.VersionExpression == version { cfgOnly = &w return nil } return nil }) if err != nil { return err } } // If we found an eligible object, deserialize the attribute list var err error cfg, err = hydrateWorkloadConfig(cfgOnly) return err // end the transaction }) if readErr != nil { return nil, readErr } else { return cfg, nil } } // filter on WorkloadConfig type WCFilter func(WorkloadConfigOnly) bool // filter for all workload config 
objects func AllWCFilter() WCFilter { return func(e WorkloadConfigOnly) bool { return true } } // filter for all the workload config objects for the given url func AllWorkloadWCFilter(workload_url string, org string) WCFilter { return func(e WorkloadConfigOnly) bool { if e.WorkloadURL == workload_url && e.Org == org { return true } else { return false } } } // find the microservice instance from the db func FindWorkloadConfigs(db *bolt.DB, filters []WCFilter) ([]WorkloadConfig, error) { cfg_instances := make([]WorkloadConfig, 0) // fetch contracts readErr := db.View(func(tx *bolt.Tx) error { cfgOnly_instances := make([]WorkloadConfigOnly, 0) if b := tx.Bucket([]byte(WORKLOAD_CONFIG)); b != nil { err := b.ForEach(func(k, v []byte) error { var e WorkloadConfigOnly if err := json.Unmarshal(v, &e); err != nil { glog.Errorf("Unable to deserialize db record: %v", v) return err } else { glog.V(5).Infof("Demarshalled workload config object in DB: %v", e) exclude := false for _, filterFn := range filters { if !filterFn(e) { exclude = true } } if !exclude { cfgOnly_instances = append(cfgOnly_instances, e) } } return nil }) if err != nil { return err } } // If we found eligible objects, deserialize the attribute list for each one for _, cfgOnly := range cfgOnly_instances { if cfg, err := hydrateWorkloadConfig(&cfgOnly); err != nil { return err } else { cfg_instances = append(cfg_instances, *cfg) } } return nil // end the transaction }) if readErr != nil { return nil, readErr } else { return cfg_instances, nil } } func DeleteWorkloadConfig(db *bolt.DB, url string, org string, version string) error { if url == "" || version == "" { return errors.New("workload URL or version is empty, cannot delete") } else { if cfg, err := FindWorkloadConfig(db, url, org, version); err != nil { return err } else if cfg == nil { return fmt.Errorf("could not find record for %v and %v", url, version) } else { return db.Update(func(tx *bolt.Tx) error { if b, err := 
tx.CreateBucketIfNotExists([]byte(WORKLOAD_CONFIG)); err != nil { return err } else if err := b.Delete([]byte(cfg.GetKey())); err != nil { return fmt.Errorf("Unable to delete workload config: %v", err) } else { return nil } }) } } }
# Make the parenthesis string read from stdin balanced by prepending the
# missing "(" characters and appending the missing ")" characters, then
# print the repaired string.
N = int(input())
S = list(input())

unmatched_open = 0   # "(" still waiting for a closing partner (append ")" later)
unmatched_close = 0  # ")" seen with no "(" before them (prepend "(" later)

for i in range(N):
    ch = S[i]
    if ch == "(":
        unmatched_open += 1
    elif ch == ")":
        if unmatched_open == 0:
            unmatched_close += 1
        else:
            unmatched_open -= 1

# Close every unmatched "(" at the end, open every unmatched ")" at the front.
S.extend(")" * unmatched_open)
S[:0] = "(" * unmatched_close
print("".join(S))
<gh_stars>0 // Import plugin import React from "react" import { Link } from "gatsby" // Import settings import settings from "../../../layouts/index" // Import component // Import styles import { Header } from "./style/style" // Create new component const HeaderComponent = () => { return ( <> <Header theme={settings}> <ul> <li> <Link to="/" activeClassName="active"> Home </Link> </li> <li> <Link to="/search" activeClassName="active"> Search </Link> </li> </ul> </Header> </> ) } // Export new component export default HeaderComponent
/* Converts from SDL keysym to DX key mapping.
 *
 * Translates the scancode of the given SDL keysym into the corresponding
 * DxLib KEY_INPUT_* constant.  The mapping is driven by the scancode (the
 * physical key position), not the symbolic keycode, so it is independent of
 * the active keyboard layout.
 *
 * Returns the matching KEY_INPUT_* value, or 0 when the scancode has no DX
 * equivalent. */
static int s_MapSDLKeyToDXKey(const SDL_Keysym *keysym) {
    switch(keysym->scancode) {
        /* Letter keys */
        case SDL_SCANCODE_A: return KEY_INPUT_A;
        case SDL_SCANCODE_B: return KEY_INPUT_B;
        case SDL_SCANCODE_C: return KEY_INPUT_C;
        case SDL_SCANCODE_D: return KEY_INPUT_D;
        case SDL_SCANCODE_E: return KEY_INPUT_E;
        case SDL_SCANCODE_F: return KEY_INPUT_F;
        case SDL_SCANCODE_G: return KEY_INPUT_G;
        case SDL_SCANCODE_H: return KEY_INPUT_H;
        case SDL_SCANCODE_I: return KEY_INPUT_I;
        case SDL_SCANCODE_J: return KEY_INPUT_J;
        case SDL_SCANCODE_K: return KEY_INPUT_K;
        case SDL_SCANCODE_L: return KEY_INPUT_L;
        case SDL_SCANCODE_M: return KEY_INPUT_M;
        case SDL_SCANCODE_N: return KEY_INPUT_N;
        case SDL_SCANCODE_O: return KEY_INPUT_O;
        case SDL_SCANCODE_P: return KEY_INPUT_P;
        case SDL_SCANCODE_Q: return KEY_INPUT_Q;
        case SDL_SCANCODE_R: return KEY_INPUT_R;
        case SDL_SCANCODE_S: return KEY_INPUT_S;
        case SDL_SCANCODE_T: return KEY_INPUT_T;
        case SDL_SCANCODE_U: return KEY_INPUT_U;
        case SDL_SCANCODE_V: return KEY_INPUT_V;
        case SDL_SCANCODE_W: return KEY_INPUT_W;
        case SDL_SCANCODE_X: return KEY_INPUT_X;
        case SDL_SCANCODE_Y: return KEY_INPUT_Y;
        case SDL_SCANCODE_Z: return KEY_INPUT_Z;
        /* Number row */
        case SDL_SCANCODE_1: return KEY_INPUT_1;
        case SDL_SCANCODE_2: return KEY_INPUT_2;
        case SDL_SCANCODE_3: return KEY_INPUT_3;
        case SDL_SCANCODE_4: return KEY_INPUT_4;
        case SDL_SCANCODE_5: return KEY_INPUT_5;
        case SDL_SCANCODE_6: return KEY_INPUT_6;
        case SDL_SCANCODE_7: return KEY_INPUT_7;
        case SDL_SCANCODE_8: return KEY_INPUT_8;
        case SDL_SCANCODE_9: return KEY_INPUT_9;
        case SDL_SCANCODE_0: return KEY_INPUT_0;
        /* Control and whitespace keys; note the DX names differ from SDL's
         * (BACKSPACE->BACK, etc.) */
        case SDL_SCANCODE_RETURN: return KEY_INPUT_RETURN;
        case SDL_SCANCODE_ESCAPE: return KEY_INPUT_ESCAPE;
        case SDL_SCANCODE_BACKSPACE: return KEY_INPUT_BACK;
        case SDL_SCANCODE_TAB: return KEY_INPUT_TAB;
        case SDL_SCANCODE_SPACE: return KEY_INPUT_SPACE;
        /* Punctuation */
        case SDL_SCANCODE_MINUS: return KEY_INPUT_MINUS;
        case SDL_SCANCODE_EQUALS: return KEY_INPUT_EQUALS;
        case SDL_SCANCODE_LEFTBRACKET: return KEY_INPUT_LBRACKET;
        case SDL_SCANCODE_RIGHTBRACKET: return KEY_INPUT_RBRACKET;
        case SDL_SCANCODE_BACKSLASH: return KEY_INPUT_BACKSLASH;
        case SDL_SCANCODE_SEMICOLON: return KEY_INPUT_SEMICOLON;
        case SDL_SCANCODE_COMMA: return KEY_INPUT_COMMA;
        case SDL_SCANCODE_PERIOD: return KEY_INPUT_PERIOD;
        case SDL_SCANCODE_SLASH: return KEY_INPUT_SLASH;
        case SDL_SCANCODE_CAPSLOCK: return KEY_INPUT_CAPSLOCK;
        /* Function keys */
        case SDL_SCANCODE_F1: return KEY_INPUT_F1;
        case SDL_SCANCODE_F2: return KEY_INPUT_F2;
        case SDL_SCANCODE_F3: return KEY_INPUT_F3;
        case SDL_SCANCODE_F4: return KEY_INPUT_F4;
        case SDL_SCANCODE_F5: return KEY_INPUT_F5;
        case SDL_SCANCODE_F6: return KEY_INPUT_F6;
        case SDL_SCANCODE_F7: return KEY_INPUT_F7;
        case SDL_SCANCODE_F8: return KEY_INPUT_F8;
        case SDL_SCANCODE_F9: return KEY_INPUT_F9;
        case SDL_SCANCODE_F10: return KEY_INPUT_F10;
        case SDL_SCANCODE_F11: return KEY_INPUT_F11;
        case SDL_SCANCODE_F12: return KEY_INPUT_F12;
        /* System keys (PRINTSCREEN maps to DX's legacy SYSRQ name) */
        case SDL_SCANCODE_PRINTSCREEN: return KEY_INPUT_SYSRQ;
        case SDL_SCANCODE_SCROLLLOCK: return KEY_INPUT_SCROLL;
        case SDL_SCANCODE_PAUSE: return KEY_INPUT_PAUSE;
        /* Navigation cluster */
        case SDL_SCANCODE_INSERT: return KEY_INPUT_INSERT;
        case SDL_SCANCODE_HOME: return KEY_INPUT_HOME;
        case SDL_SCANCODE_PAGEUP: return KEY_INPUT_PGUP;
        case SDL_SCANCODE_DELETE: return KEY_INPUT_DELETE;
        case SDL_SCANCODE_END: return KEY_INPUT_END;
        case SDL_SCANCODE_PAGEDOWN: return KEY_INPUT_PGDN;
        case SDL_SCANCODE_RIGHT: return KEY_INPUT_RIGHT;
        case SDL_SCANCODE_LEFT: return KEY_INPUT_LEFT;
        case SDL_SCANCODE_DOWN: return KEY_INPUT_DOWN;
        case SDL_SCANCODE_UP: return KEY_INPUT_UP;
        /* Numeric keypad */
        case SDL_SCANCODE_NUMLOCKCLEAR: return KEY_INPUT_NUMLOCK;
        case SDL_SCANCODE_KP_DIVIDE: return KEY_INPUT_DIVIDE;
        case SDL_SCANCODE_KP_MULTIPLY: return KEY_INPUT_MULTIPLY;
        case SDL_SCANCODE_KP_MINUS: return KEY_INPUT_SUBTRACT;
        case SDL_SCANCODE_KP_PLUS: return KEY_INPUT_ADD;
        case SDL_SCANCODE_KP_ENTER: return KEY_INPUT_NUMPADENTER;
        case SDL_SCANCODE_KP_1: return KEY_INPUT_NUMPAD1;
        case SDL_SCANCODE_KP_2: return KEY_INPUT_NUMPAD2;
        case SDL_SCANCODE_KP_3: return KEY_INPUT_NUMPAD3;
        case SDL_SCANCODE_KP_4: return KEY_INPUT_NUMPAD4;
        case SDL_SCANCODE_KP_5: return KEY_INPUT_NUMPAD5;
        case SDL_SCANCODE_KP_6: return KEY_INPUT_NUMPAD6;
        case SDL_SCANCODE_KP_7: return KEY_INPUT_NUMPAD7;
        case SDL_SCANCODE_KP_8: return KEY_INPUT_NUMPAD8;
        case SDL_SCANCODE_KP_9: return KEY_INPUT_NUMPAD9;
        case SDL_SCANCODE_KP_0: return KEY_INPUT_NUMPAD0;
        case SDL_SCANCODE_KP_PERIOD: return KEY_INPUT_DECIMAL;
        case SDL_SCANCODE_KP_COMMA: return KEY_INPUT_NUMPADCOMMA;
        /* Modifier keys */
        case SDL_SCANCODE_LCTRL: return KEY_INPUT_LCONTROL;
        case SDL_SCANCODE_LSHIFT: return KEY_INPUT_LSHIFT;
        case SDL_SCANCODE_LALT: return KEY_INPUT_LALT;
        case SDL_SCANCODE_LGUI: return KEY_INPUT_LWIN;
        case SDL_SCANCODE_RCTRL: return KEY_INPUT_RCONTROL;
        case SDL_SCANCODE_RSHIFT: return KEY_INPUT_RSHIFT;
        case SDL_SCANCODE_RALT: return KEY_INPUT_RALT;
        case SDL_SCANCODE_RGUI: return KEY_INPUT_RWIN;
        /* Japanese-keyboard keys (INTERNATIONAL3 is the yen key) */
        case SDL_SCANCODE_INTERNATIONAL3: return KEY_INPUT_YEN;
        case SDL_SCANCODE_KP_AT: return KEY_INPUT_AT;
        case SDL_SCANCODE_KP_COLON: return KEY_INPUT_COLON;
        /* No DX equivalent for this scancode. */
        default: return 0;
    }
}
// Copyright 2016 <NAME> (Falcons) // SPDX-License-Identifier: Apache-2.0 // Copyright 2016 <NAME> // Licensed under the Apache License version 2.0 // You may not use this file except in compliance with this License // You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 #ifndef DRV8301_HPP #define DRV8301_HPP #include <inttypes.h> class drv8301 { private: public: drv8301( ); uint32_t getValue( bool ballHandler ); }; #endif
/** * A factory to create new instances of classes of open metadata classifications by name. Return null if the classification is not known. */ public class ClassificationFactory { private static final Logger log = LoggerFactory.getLogger( ClassificationFactory.class); private static final String className = ClassificationFactory.class.getName(); private OMRSRepositoryHelper repositoryHelper; private OpenMetadataAPIGenericHandler genericHandler; public ClassificationFactory(OpenMetadataAPIGenericHandler genericHandler){ this.genericHandler= genericHandler; this.repositoryHelper=genericHandler.getRepositoryHelper(); } /** * Get an OMAS Classification based on the name and then map the supplied omrs classificaiton to the omas one. * @param name name of the classification. Note this may not match the classification name if the classification is a sub type of the supplied name. * @param omrsClassification the supplied omrs classification to map * @return the omas classification, null if not known. 
*/ public org.odpi.openmetadata.accessservices.subjectarea.properties.classifications.Classification getOMASClassification(String name, Classification omrsClassification) { final String serviceName = genericHandler.getServiceName(); if (this.repositoryHelper.isTypeOf(serviceName,name,"SpineAttribute")) { return new SpineAttributeMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"CanonicalVocabulary")) { return new CanonicalVocabularyMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Confidence")) { return new ConfidenceMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Criticality")) { return new CriticalityMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"GlossaryProject")) { return new GlossaryProjectMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"SpineObject")) { return new SpineObjectMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Taxonomy")) { return new TaxonomyMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Retention")) { return new RetentionMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"SubjectArea")) { return new SubjectAreaMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"ObjectIdentifier")) { return new ObjectIdentifierMapper(genericHandler).mapOmrsToBean(omrsClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Confidentiality")) { return new ConfidentialityMapper(genericHandler).mapOmrsToBean(omrsClassification); } return null; } /** * Get an OMRS Classification based on 
the the supplied omas classification. * @param omasClassification the supplied omas classification to map * @return the omas classification, null if not known. */ public Classification getOMRSClassification(org.odpi.openmetadata.accessservices.subjectarea.properties.classifications.Classification omasClassification) { final String name = omasClassification.getClassificationName(); final String serviceName = genericHandler.getServiceName(); if (this.repositoryHelper.isTypeOf(serviceName,name,"SpineAttribute")) { return new SpineAttributeMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"CanonicalVocabulary")) { return new CanonicalVocabularyMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Confidence")) { return new ConfidenceMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Criticality")) { return new CriticalityMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"GlossaryProject")) { return new GlossaryProjectMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"SpineObject")) { return new SpineObjectMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Taxonomy")) { return new TaxonomyMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"Retention")) { return new RetentionMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"SubjectArea")) { return new SubjectAreaMapper(genericHandler).mapBeanToOmrs(omasClassification); } if (this.repositoryHelper.isTypeOf(serviceName,name,"ObjectIdentifier")) { return new ObjectIdentifierMapper(genericHandler).mapBeanToOmrs(omasClassification); } if 
(this.repositoryHelper.isTypeOf(serviceName,name,"Confidentiality")) { return new ConfidentialityMapper(genericHandler).mapBeanToOmrs(omasClassification); } return null; } }
It's nature's example of Joseph and his Technicolour Dream Coat, the chameleon. They're just phenomenal. There is this myth that chameleons change colour to blend in with their surroundings, but this is actually not true. Most of the reason chameleons change colour is as a signal, a visual signal of mood and aggression, territory and mating behaviour. The way that chameleons actually do this is molecular - they're molecular masterminds, really. If you look at the skin of a chameleon, you find that they have several layers of specialised cells called chromatophores and these are cells that can change colour. On the outer surface of the chameleon, the skin is transparent and just below that is the first layer of these cells, and they contain pigments. These cells are called xanthophores, containing particular specialised pigments that have a yellow colour. Beneath that are pigment cells which are called erythrophores, which have a red colour in them. Beneath them is another layer of cells called iridophores, which contain a crystalline substance called guanine that reflects blue light; guanine is actually also used in making DNA. Underneath all of those is another layer of cells called melanophores, which have a brown pigment - melanin - in them. Now, how does the chameleon change colour? Well the chromatophores are wired up to the nervous system. They are also sensitive to chemicals that are washing around in the blood stream of the chameleon. What happens is that the colours are locked away in tiny vesicles, little sacs inside the cells that keep them in one place, so the cells don't look coloured. But, when a signal comes in from the nervous system or from the blood stream, the granules or vesicles can discharge, allowing the colour to spread out across the cell, and this alters the colour of the cell. It's rather like giving the cell a coat of paint.
By varying the relative amount of activity of the different chromatophores in different layers of the skin, it's like mixing different paints together. So if you mix red and yellow, you get orange for example, and this is how chameleons do this. They mix different contributions of these chromatophores. It's a bit like your television screen, where different colours are mixed together to produce the colour that the eye ultimately perceives. And so that's how the chameleon changes colour, and usually does so to convey mood. So a calm chameleon is a pale greeny colour. When it gets angry, it might go bright yellow, and when it wants to mate, it basically turns on every possible colour it can, which shows that it's in the mood. This is not unique to chameleons. Other animals also have these chromatophores. Cuttlefish are another very elegant example of how this works. For chameleons though, it's not so much to do with camouflage, it's more to do with communication...
// Renal vascularization driver built on the VItA library.
//
// Pipeline: a base (root) tree is grown in the whole-organ hull
// (vascularize_base), the tree is then extended independently inside each
// sub-territory (vascularize_part), and finally the per-part point files are
// merged back into a single tree (merge).
//
// NOTE(review): exit()/EXIT_FAILURE and lldiv() are used below but <cstdlib>
// is not included, and std::cout is used without <iostream>; these currently
// compile only if the VItA headers include them transitively — confirm.

// STD Libs
#include<cmath>
#include<string>
#include<ctime>
#include<cstdio>
#include<vector>

// VItA Libs
#include<structures/domain/AbstractDomain.h>
#include<structures/domain/SimpleDomain.h>
#include<structures/domain/DomainNVR.h>
#include<structures/domain/PartiallyVascularizedDomain.h>
#include<structures/domain/StagedDomain.h>
#include<core/GeneratorData.h>
#include<core/StagedFRROTreeGenerator.h>
#include<constrains/ConstantConstraintFunction.h>
#include<constrains/ConstantPiecewiseConstraintFunction.h>
#include<structures/tree/SingleVesselCCOOTree.h>
#include<structures/vascularElements/AbstractVascularElement.h>
#include<core/TreeMerger.h>
#include<io/task/CheckpointSavingTask.h>
#include<io/task/VisualizationSavingTask.h>
#include<io/VTKObjectTreeNodalWriter.h>
#include<io/VTKObjectTreeStrahlerWriter.h>
#include<structures/tree/AdimSproutingVolumetricCostEstimator.h>
#include<structures/tree/SproutingVolumetricCostEstimator.h>
#include<io/StagedFRROTreeGeneratorLogger.h>

using namespace std;

// Folders location: fixed absolute paths for inputs, final outputs and
// intermediate step files.
string input_folder {"/home/lfmc/HeMoLab/VascularizacaoRenal/Input/"};
string output_folder {"/home/lfmc/HeMoLab/VascularizacaoRenal/Output/"};
string steps_folder {"/home/lfmc/HeMoLab/VascularizacaoRenal/Steps/"};
string input_vtk_folder {input_folder + "VTK/"};
string input_cco_folder {input_folder + "CCO/"};
string output_vtp_folder {output_folder + "VTP/"};
string output_cco_folder {output_folder + "CCO/"};
string output_log_folder {output_folder + "LOG/"};
string output_points_folder {output_folder + "Points/"};
string steps_vtp_folder {steps_folder + "VTP/"};
string steps_cco_folder {steps_folder + "CCO/"};

// Grows the base vascular tree inside the whole-organ hull using the given
// cost-functional weights (vol_f/prot_f/dif_f, each scaled by 1/10000 below)
// and saves the tree (.cco), its geometry (.vtp) and a configuration log.
// Returns the path of the saved .cco file.
string vascularize_base(int vol_f, int prot_f, int dif_f, string suffix_id) {
    printf("Begin base tree vascularization.\n");

    // Root boundary conditions: perfusion volume, root radius, root flow.
    // NOTE(review): values assumed to be in cm-based units given
    // setIsInCm(true) below — confirm.
    double v0 {8};
    double r0 {0.09};
    double q0 {0.72};

    // CCO generation parameters for this stage.
    int n_level_test {16000};
    int n_terminal_trial {2000};
    double d_lim_red_factor {.9};
    double mid_point_d_lim_factor {.25};
    double perfusion_area_factor {1.0};
    double close_neighborhood_factor {8.0};
    int n_bifurcation_test {7};
    int n_draw {2000};
    int seed {1922};
    double min_bif_angle {3./18. * (M_PI)};  // 30 degrees
    long long int n_term {200};              // terminals to add in this stage

    // Constraint functions: gam = 3.0 matches the Murray's-law bifurcation
    // exponent; nu is presumably blood viscosity; eps_lim caps the symmetry
    // ratio (0.4 up to bifurcation level 7, unconstrained after).
    // NOTE(review): confirm semantics against the VItA documentation.
    AbstractConstraintFunction<double, int> *gam {new ConstantConstraintFunction<double, int>(3.0)};
    AbstractConstraintFunction<double, int> *nu {new ConstantConstraintFunction<double, int>(3.6)};
    AbstractConstraintFunction<double, int> *eps_lim {new ConstantPiecewiseConstraintFunction<double, int>({0.4, 0}, {0, 7})};

    // Timestamp
    // time_t now_time_t = time(nullptr);
    // struct tm *now_tm = localtime(&now_time_t);
    // char time_c_string[21];
    // strftime(time_c_string, 20, "%d_%m_%Y_%H_%M_%S", now_tm);
    // string time_string(time_c_string);

    // Relevant input files
    //Geometry
    string input_vtk_hull {input_vtk_folder + "rsMultipleInputhull.vtk"};
    //Vascular tree
    string input_cco {input_cco_folder + "rsMultipleInput_new.cco"};

    // Output
    // string output_base {"rsMultipleInputBase_" + string(functional_name) + "_" + time_string};
    string output_base {"rsMultipleInput_" + suffix_id};

    // The integer weights are normalized to [0, 1] fractions for the
    // adimensional sprouting cost estimator.
    double scale_factor {10000.0};
    AdimSproutingVolumetricCostEstimator *cost_estimator {new AdimSproutingVolumetricCostEstimator(vol_f / scale_factor, prot_f / scale_factor, dif_f / scale_factor, v0, r0)};
    AbstractVascularElement::VESSEL_FUNCTION function {AbstractVascularElement::VESSEL_FUNCTION::DISTRIBUTION};
    GeneratorData *gen_data_0 {new GeneratorData(n_level_test, n_terminal_trial, d_lim_red_factor, perfusion_area_factor, close_neighborhood_factor, mid_point_d_lim_factor, n_bifurcation_test, function, false, cost_estimator)};
    SimpleDomain *domain_0 {new SimpleDomain(input_vtk_hull, n_draw, seed, gen_data_0)};
    (*domain_0).setIsConvexDomain(false);
    (*domain_0).setMinBifurcationAngle(min_bif_angle);

    // Single-stage generation over the hull domain.
    StagedDomain *staged_domain {new StagedDomain()};
    (*staged_domain).addStage(n_term, domain_0);
    long long int n_term_total {n_term};

    SingleVesselCCOOTree *tree {new SingleVesselCCOOTree(input_cco, gen_data_0, q0, gam, eps_lim, nu, 0.0, 1.0e-5)};
    tree->setIsInCm(true);
    StagedFRROTreeGenerator *tree_generator {new StagedFRROTreeGenerator(staged_domain, tree, n_term_total, {gam}, {eps_lim}, {nu})};

    // CheckpointSavingTask *check_saving_task {new CheckpointSavingTask(steps_folder, output_base + "_step_")};
    // VisualizationSavingTask *vtk_saving_task {new VisualizationSavingTask(steps_folder, output_base + "_steps_")};
    // (*tree_generator).setSavingTasks({check_saving_task, vtk_saving_task});

    // Resume tree growth, checkpointing every 500 steps into steps_folder.
    // NOTE(review): resume() presumably returns the (grown) input tree, so the
    // original pointer is not leaked by this reassignment — confirm.
    tree = {(SingleVesselCCOOTree *) (*tree_generator).resume(500, steps_folder)};

    string output_cco {output_cco_folder + output_base + ".cco"};
    (*tree).save(output_cco);
    VTKObjectTreeStrahlerWriter *tree_writer {new VTKObjectTreeStrahlerWriter()};
    string output_vtp {output_vtp_folder + output_base + ".vtp"};
    (*tree_writer).write(output_vtp, tree);

    // Write the generator configuration log for reproducibility.
    string output_log {output_log_folder + output_base + ".log"};
    FILE* fp {fopen((output_log).c_str(), "w")};
    if(!fp) {
        fprintf(stderr, "Failed to create configuration log file.\n");
        exit(EXIT_FAILURE);
    }
    StagedFRROTreeGeneratorLogger *logger {new StagedFRROTreeGeneratorLogger(fp, tree_generator)};
    (*logger).write();
    delete logger;
    fclose(fp);

    delete tree_writer;
    // delete vtk_saving_task;
    // delete check_saving_task;
    // delete tree_generator;
    // NOTE(review): 'delete tree_generator' above is commented out, so the
    // generator object leaks on every call — confirm whether intentional.
    delete tree;
    delete staged_domain;
    delete domain_0;
    delete gen_data_0;
    delete eps_lim;
    delete nu;
    delete gam;
    printf("End base tree vascularization");
    return output_cco;
}

// Extends the tree stored in input_cco inside one sub-territory (input_part),
// in two stages with progressively tighter neighborhood factors, and saves the
// generated mid-points to a .points file. Returns the path of the points file,
// which is later consumed by merge().
string vascularize_part(string suffix, string input_part, string input_cco) {
    // Relevant input files
    //Geometry
    string input_vtk_part {input_vtk_folder + input_part};

    // Output
    string output_base {"rsMultipleInput_" + suffix};
    string output_cco {output_cco_folder + output_base + ".cco"};
    string output_log {output_log_folder + output_base + ".log"};
    string output_vtp {output_vtp_folder + output_base + ".vtp"};
    string output_points {output_points_folder + output_base + ".points"};

    // Generation parameters; the two entries of close_neighborhood_factor and
    // seed configure the two consecutive stages below.
    int n_level_test {16000};
    int n_terminal_trial {2000};
    double d_lim_red_factor {.9};
    double mid_point_d_lim_factor {.25};
    double perfusion_area_factor {1.0};
    vector<double> close_neighborhood_factor ({4., 2.});
    int n_bifurcation_test {7};
    int n_draw {2000};
    vector<int> seed ({1517, 32});
    double min_bif_angle {3./18. * (M_PI)};  // 30 degrees
    long long int base_term {200};  // terminals already present in the base tree
    long long int part_term {400};  // terminals to add inside this part
    AbstractVascularElement::VESSEL_FUNCTION function {AbstractVascularElement::VESSEL_FUNCTION::DISTRIBUTION};
    AbstractConstraintFunction<double, int> *gam {new ConstantConstraintFunction<double, int>(3.0)};
    AbstractConstraintFunction<double, int> *nu {new ConstantConstraintFunction<double, int>(3.6)};
    AbstractConstraintFunction<double, int> *eps_lim {new ConstantPiecewiseConstraintFunction<double, int>({0.4, 0}, {0, 7})};

    // Stage 0: coarse pass over the part with the looser neighborhood factor.
    GeneratorData *gen_data_0 {new GeneratorData(n_level_test, n_terminal_trial, d_lim_red_factor, perfusion_area_factor, close_neighborhood_factor[0], mid_point_d_lim_factor, n_bifurcation_test, function, false)};
    SimpleDomain *domain_0 {new SimpleDomain(input_vtk_part, n_draw, seed[0], gen_data_0)};
    (*domain_0).setMinBifurcationAngle(min_bif_angle);
    (*domain_0).setIsConvexDomain(false);
    // Stage 1: refinement pass with the tighter neighborhood factor.
    GeneratorData *gen_data_1 {new GeneratorData(n_level_test, n_terminal_trial, d_lim_red_factor, perfusion_area_factor, close_neighborhood_factor[1], mid_point_d_lim_factor, n_bifurcation_test, function, false)};
    SimpleDomain *domain_1 {new SimpleDomain(input_vtk_part, n_draw, seed[1], gen_data_1)};
    (*domain_1).setMinBifurcationAngle(min_bif_angle);
    (*domain_1).setIsConvexDomain(false);

    // Split part_term terminals 1/3 : 2/3 (remainder to the second stage).
    // NOTE(review): lldiv comes from <cstdlib>, not included here — confirm.
    lldiv_t term_division = lldiv(part_term, 3);
    vector<long long int> n_term {term_division.quot, 2 * term_division.quot + term_division.rem};
    StagedDomain *staged_domain {new StagedDomain()};
    (*staged_domain).addStage(n_term[0], domain_0);
    (*staged_domain).addStage(n_term[1], domain_1);
    // The base tree was built as stage 0, so the per-part stages start at 1.
    int merge_stage {1};
    (*staged_domain).setInitialStage(merge_stage);

    SingleVesselCCOOTree *tree {new SingleVesselCCOOTree(input_cco, gen_data_0, gam, eps_lim, nu)};
    (*tree).setIsInCm(true);
    StagedFRROTreeGenerator *tree_generator {new StagedFRROTreeGenerator(staged_domain, tree, base_term + part_term, {gam, gam, gam}, {eps_lim, eps_lim, eps_lim}, {nu, nu, nu})};
    // (*tree_generator).setDLim(base_d_lim);

    // Grow the tree, streaming generated mid-points into a binary points file
    // (flushed every 10 steps) so the parts can later be merged into one tree.
    FILE *fp_points {fopen(output_points.c_str(), "wb")};
    if(!fp_points) {
        fprintf(stderr, "Failed to create points file.\n");
        exit(EXIT_FAILURE);
    }
    (*tree_generator).resumeSavePointsMidPoint(10, steps_folder, fp_points);
    fclose(fp_points);

    (*tree).save(output_cco);
    VTKObjectTreeNodalWriter *tree_writer {new VTKObjectTreeNodalWriter()};
    (*tree_writer).write(output_vtp, tree);

    // Write the generator configuration log for reproducibility.
    FILE* fp {fopen(output_log.c_str(), "w")};
    if(!fp) {
        fprintf(stderr, "Failed to create configuration log file.\n");
        exit(EXIT_FAILURE);
    }
    StagedFRROTreeGeneratorLogger *logger {new StagedFRROTreeGeneratorLogger(fp, tree_generator)};
    (*logger).write();
    delete logger;
    fclose(fp);

    delete tree_writer;
    delete tree_generator;
    delete tree;
    delete staged_domain;
    delete domain_1;
    delete gen_data_1;
    delete domain_0;
    delete gen_data_0;
    delete eps_lim;
    delete nu;
    delete gam;
    return output_points;
}

// Merges the per-part point files (derived_points) back into the base tree
// read from base_tree, then saves the merged tree (.cco and .vtp).
// A failed merge (out_of_range from mergeFast) is reported but not fatal.
void merge(string base_tree, vector<string> derived_points, string suffix) {
    printf("Begin merge.\n");
    string output_base {"rsMultipleInput_" + suffix };
    string output_cco {output_cco_folder + output_base + ".cco"};
    string output_log {output_log_folder + output_base + ".log"};  // NOTE(review): declared but no log is written here — confirm.
    string output_vtp {output_vtp_folder + output_base + ".vtp"};
    AbstractConstraintFunction<double, int> *gam {new ConstantConstraintFunction<double, int>(3.0)};
    AbstractConstraintFunction<double, int> *nu {new ConstantConstraintFunction<double, int>(3.6)};
    AbstractConstraintFunction<double, int> *eps_lim {new ConstantPiecewiseConstraintFunction<double, int>({0.4, 0}, {0, 7})};
    GeneratorData *gen_data {new GeneratorData()};
    SingleVesselCCOOTree *tree {new SingleVesselCCOOTree(base_tree, gen_data, gam, eps_lim, nu)};
    TreeMerger *tree_merger {new TreeMerger(tree, derived_points)};
    try {
        // NOTE(review): mergeFast() presumably returns the (merged) input
        // tree, so this reassignment does not leak — confirm.
        tree = {(*tree_merger).mergeFast()};
        (*tree).save(output_cco);
        VTKObjectTreeStrahlerWriter *tree_writer {new VTKObjectTreeStrahlerWriter()};
        (*tree_writer).write(output_vtp, tree);
        delete tree_writer;
    }
    catch(std::out_of_range const&) {
        std::cout << "This merge failed!" << std::endl;
    }
    delete tree_merger;
    delete tree;
    delete gen_data;
    delete eps_lim;
    delete nu;
    delete gam;
    printf("End merge.\n");
}

// For each cost-functional weight combination in 'factors', vascularizes every
// sub-territory against a pre-existing base tree and merges the results.
// NOTE(review): the vascularize_base call is commented out, so the base .cco
// for each functional is assumed to already exist in output_cco_folder.
int main(int argc, char *argv[]) {
    // int factors[4][3] {{9990, 5, 5}, {9990, 10, 0}, {9990, 0, 10}, {10000, 0, 0}};
    // int factors[10][3] {{2000, 0, 8000}, {1000, 0, 9000}, {3000, 0, 7000},
    // {8000, 0, 2000}, {6000, 0, 4000}, {4000, 0, 6000}, {9000, 0, 1000}, {7000, 0, 3000},
    // {5000, 0, 5000}, {0, 0, 10000}};
    // Weight triples {vol_f, prot_f, dif_f}; each row sums to 10000.
    int factors[3][3] {{1000, 8000, 1000}, {2000, 7000, 1000}, {4000, 5000, 1000}};
    // The eight sub-territory meshes of the organ.
    vector<string> parts {"rsMultipleInputp01.vtk", "rsMultipleInputp02.vtk", "rsMultipleInputp03.vtk", "rsMultipleInputp04.vtk", "rsMultipleInputp05.vtk", "rsMultipleInputp06.vtk", "rsMultipleInputp07.vtk", "rsMultipleInputp08.vtk"};
    // for (size_t vol_idx = 0; vol_idx < 11; ++vol_idx) {
    // for (size_t prot_idx = 0; prot_idx < vol_idx; ++prot_idx) {
    // int vol_f = 10000 - (1000 * vol_idx);
    // int prot_f = 1000 * prot_idx;
    // int dif_f = 10000 - vol_f - prot_f;
    // char functional_name[200];
    // sprintf(functional_name, "func_%05d_%05d_%05d", vol_f, prot_f, dif_f);
    // string cco_base {vascularize_base(vol_f, prot_f, dif_f, string(functional_name)+"_base")};
    // }
    for (int i = 0 ; i < 3; ++i) {
        int vol_f, prot_f, dif_f;
        vol_f = factors[i][0];
        prot_f = factors[i][1];
        dif_f = factors[i][2];
        // Functional id encoded in file names, e.g. "func_01000_08000_01000".
        char functional_name[200];
        sprintf(functional_name, "func_%05d_%05d_%05d", vol_f, prot_f, dif_f);
        // string cco_base {vascularize_base(vol_f, prot_f, dif_f, string(functional_name)+"_base")};
        string cco_base {output_cco_folder + "rsMultipleInput_" + string(functional_name)+"_base.cco"};
        vector<string> points;
        for (size_t j = 0; j != parts.size(); ++j) {
            char part_name[50];
            sprintf(part_name, "_p%02lu", j);
            string points_output {vascularize_part(string(functional_name)+ string(part_name), parts[j], cco_base)};
            points.push_back(points_output);
        }
        merge(cco_base, points, string(functional_name) + "_merged");
    }
    return 0;
}
# test bytearray.append method
a = bytearray(4)
print(a)

# appending a single byte grows the array by one element
a.append(2)
print(a)

# a failed append (non-int argument) must leave the array untouched
try:
    a.append(None)
except TypeError as exc:
    print(type(exc).__name__)
print(a)
/**
 * Contains static helper methods for reading data and metadata from NetCDF
 * files, OPeNDAP servers and other data sources using the Unidata Common Data
 * Model.
 *
 * @author Jon Blower
 * @author Guy Griffiths
 * @author Mike Grant, Plymouth Marine Labs
 */
public final class CdmUtils {
    private static final Logger log = LoggerFactory.getLogger(CdmUtils.class);

    /*
     * Enforce non-instantiability
     */
    private CdmUtils() {
        throw new AssertionError();
    }

    /**
     * Wraps the given dataset in a {@link GridDataset}, failing fast when the
     * dataset contains no gridded data.
     *
     * @param ncDataset
     *            The {@link NetcdfDataset} to get a {@link GridDataset} from
     * @return A {@link GridDataset} from the given {@link NetcdfDataset}
     * @throws DataReadingException
     *             If the given {@link NetcdfDataset} doesn't contain any
     *             {@link GridDataset}s
     * @throws IOException
     *             If the underlying dataset cannot be read
     */
    public static GridDataset getGridDataset(NetcdfDataset ncDataset)
            throws DataReadingException, IOException {
        /*
         * TODO Convert this to return Coverage objects once netcdf-5 is more
         * stable
         */
        GridDataset gridDataset = new GridDataset(ncDataset);
        if (gridDataset.getGrids().size() == 0) {
            throw new DataReadingException("No grids found in underlying NetCDF dataset");
        }
        return gridDataset;
    }

    /**
     * Estimates the optimum {@link DataReadingStrategy} from the given
     * NetcdfDataset. Essentially, if the data are remote (e.g. OPeNDAP) or
     * compressed, this will return {@link DataReadingStrategy#BOUNDING_BOX},
     * which makes a single i/o call, minimizing the overhead. If the data are
     * local and uncompressed this will return
     * {@link DataReadingStrategy#SCANLINE}, which reduces the amount of data
     * read.
     *
     * @param nc
     *            The NetcdfDataset from which data will be read.
* @return an optimum DataReadingStrategy for reading from the dataset */ public static DataReadingStrategy getOptimumDataReadingStrategy(NetcdfDataset nc) { String fileType = nc.getFileTypeId(); if ("netCDF".equalsIgnoreCase(fileType) || "HDF4".equalsIgnoreCase(fileType)) { return DataReadingStrategy.SCANLINE; } else { try { GridDataset gridDataset = getGridDataset(nc); for (GridDatatype grid : gridDataset.getGrids()) { HorizontalGrid hGrid = CdmUtils .createHorizontalGrid(grid.getCoordinateSystem()); DataType dt = grid.getDataType(); long totalsize = hGrid.size() * dt.getSize(); /* * If the size of the largest grid is greater than a * fraction of the maximum amount of memory, use a SCANLINE * strategy. * * Here, we set the multiplier for the maximum memory. * Although it's relatively small, objects are actually * (considerably?) bigger than the (dimension * data type * size) result. Additionally, we need to run everything * else... * * If we get reports that this is still too large, it can be * lowered, or we can make it configurable. */ double multiplier = 0.5; if (totalsize > multiplier * Runtime.getRuntime().maxMemory()) { return DataReadingStrategy.SCANLINE; } } } catch (DataReadingException | IOException e) { /* * Ignore exception - it's either not a GridDataset or we can't * open it. If it's not a GridDataset, we won't be reading it * with a gridded strategy. If we can't open it, we're screwed * either way. */ } return DataReadingStrategy.BOUNDING_BOX; } } /** * * @param coordSys * The {@link GridCoordSystem} to create a {@link HorizontalGrid} * from * @return two-dimensional referenceable grid from the given grid coordinate * system. Will return more specific subclasses ( * {@link RectilinearGrid} or {@link RegularGrid}) if appropriate * for the passed-in coordinate system. The grid's coordinate system * will be a WGS84 longitude-latitude system. 
* * TODO May want to be careful about datum shifts - model data is * often in spherical coordinates, not strict WGS84 */ public static HorizontalGrid createHorizontalGrid(GridCoordSystem coordSys) { CoordinateAxis xAxis = coordSys.getXHorizAxis(); CoordinateAxis yAxis = coordSys.getYHorizAxis(); boolean isLatLon = xAxis.getAxisType() == AxisType.Lon && yAxis.getAxisType() == AxisType.Lat; if (xAxis instanceof CoordinateAxis1D && yAxis instanceof CoordinateAxis1D) { ReferenceableAxis<Double> xRefAxis = createReferenceableAxis((CoordinateAxis1D) xAxis); ReferenceableAxis<Double> yRefAxis = createReferenceableAxis((CoordinateAxis1D) yAxis); if (isLatLon) { /* We can create a RectilinearGrid in lat-lon space */ if (xRefAxis instanceof RegularAxis && yRefAxis instanceof RegularAxis) { /* We can create a regular grid */ return new RegularGridImpl((RegularAxis) xRefAxis, (RegularAxis) yRefAxis, GISUtils.defaultGeographicCRS()); } else { /* Axes are not both regular */ return new RectilinearGridImpl(xRefAxis, yRefAxis, GISUtils.defaultGeographicCRS()); } } else { /* * Axes are not latitude and longitude so we need to create a * ReferenceableGrid that uses the coordinate system's * Projection object to convert from x and y to lat and lon */ return new CdmTransformedGrid(coordSys); } } else if (xAxis instanceof CoordinateAxis2D && yAxis instanceof CoordinateAxis2D) { /* The axis must be 2D so we have to create look-up tables */ if (!isLatLon) { throw new UnsupportedOperationException("Can't create a HorizontalGrid" + " from 2D coordinate axes that are not longitude and latitude."); } final CoordinateAxis2D lonAxis = (CoordinateAxis2D) xAxis; final CoordinateAxis2D latAxis = (CoordinateAxis2D) yAxis; Array2D<Number> lonVals = get2DCoordinateValues(lonAxis); Array2D<Number> latVals = get2DCoordinateValues(latAxis); return LookUpTableGrid.generate(lonVals, latVals); } else { /* Shouldn't get here */ throw new IllegalStateException("Inconsistent axis types"); } } public static 
Array2D<Number> get2DCoordinateValues(final CoordinateAxis2D axis) { return new Array2D<Number>(axis.getShape(0), axis.getShape(1)) { private static final long serialVersionUID = 1L; @Override public void set(Number value, int... coords) { throw new UnsupportedOperationException("This Array2D is immutable"); } @Override public Number get(int... coords) { return axis.getCoordValue(coords[0], coords[1]); } }; } /** * @param zAxis * the {@link CoordinateAxis1D} to create a {@link VerticalAxis} * from * @param isPositive * Whether increasing values * @return The resulting {@link VerticalAxis} */ public static VerticalAxis createVerticalAxis(CoordinateAxis1D zAxis, boolean isPositive) { if (zAxis == null) { return null; } boolean isPressure = false; String units = ""; List<Double> values = Collections.emptyList(); isPressure = zAxis.getAxisType() == AxisType.Pressure; units = zAxis.getUnitsString(); List<Double> zValues = new ArrayList<Double>(); for (double zVal : zAxis.getCoordValues()) { zValues.add(zVal); } values = Collections.unmodifiableList(zValues); VerticalCrs vCrs = new VerticalCrsImpl(units, isPressure, units.isEmpty(), isPositive); return new VerticalAxisImpl("Vertical Axis", values, vCrs); } /** * Creates a time axis from the given {@link GridCoordSystem} * * @param timeAxis * the {@link CoordinateAxis1DTime} defining the axis * @return a new {@link TimeAxis} */ public static TimeAxis createTimeAxis(CoordinateAxis1DTime timeAxis) { if (timeAxis == null) { return null; } Attribute cal = timeAxis.findAttribute("calendar"); String calString = cal == null ? 
null : cal.getStringValue().toLowerCase(); Chronology chron = getChronologyForString(calString); if (chron == null) { throw new IllegalArgumentException("The calendar system " + cal.getStringValue() + " cannot be handled"); } List<DateTime> timesteps = new ArrayList<DateTime>(); for (CalendarDate date : timeAxis.getCalendarDates()) { timesteps.add(new DateTime(date.getMillis(), chron)); } return new TimeAxisImpl("time", timesteps); } /* * Gets a Chronology from its CF string representation */ private static Chronology getChronologyForString(String chronologyString) { if (chronologyString == null || "gregorian".equalsIgnoreCase(chronologyString) || "standard".equalsIgnoreCase(chronologyString)) { return ISOChronology.getInstanceUTC(); } else if ("proleptic_gregorian".equalsIgnoreCase(chronologyString)) { return GregorianChronology.getInstanceUTC(); } else if ("julian".equalsIgnoreCase(chronologyString)) { return JulianChronology.getInstanceUTC(); } else if ("noleap".equalsIgnoreCase(chronologyString) || "365_day".equalsIgnoreCase(chronologyString)) { return NoLeapChronology.getInstanceUTC(); } else if ("all_leap".equalsIgnoreCase(chronologyString) || "366_day".equalsIgnoreCase(chronologyString)) { return AllLeapChronology.getInstanceUTC(); } else if ("360_day".equalsIgnoreCase(chronologyString)) { return ThreeSixtyDayChronology.getInstanceUTC(); } return null; } /** * Creates a {@link ReferenceableAxis} from the given * {@link CoordinateAxis1D}. Creates a longitude axis if * axis.getAxisType()==AxisType.Lon. * * @param axis * The {@link CoordinateAxis1D} to convert to a * {@link ReferenceableAxis} * @return An equivalent {@link ReferenceableAxis} */ public static ReferenceableAxis<Double> createReferenceableAxis(CoordinateAxis1D axis) { return createReferenceableAxis(axis, axis.getAxisType() == AxisType.Lon); } /** * Creates a {@link ReferenceableAxis} from the given * {@link CoordinateAxis1D}. 
* * @param axis * The {@link CoordinateAxis1D} to convert to a * {@link ReferenceableAxis} * @param isLongitude * true if this is a longitude axis ({@literal i.e.} wraps at 360 * degrees). * @return The equivalent {@link ReferenceableAxis} */ public static ReferenceableAxis<Double> createReferenceableAxis(CoordinateAxis1D axis, boolean isLongitude) { if (axis == null) throw new NullPointerException(); String name = axis.getFullName(); Attribute boundsAttr = axis.findAttribute("bounds"); if (boundsAttr != null) { /* * The cell bounds are specified by another variable in the data * file. */ List<Double> axisValues = new ArrayList<>(); List<Extent<Double>> axisBounds = new ArrayList<>(); for (int i = 0; i < axis.getSize(); i++) { double[] coordBounds = axis.getCoordBounds(i); if (coordBounds.length != 2) { throw new IllegalArgumentException( "You must specify exactly 2 boundary points for each axis point. " + coordBounds.length + " have been supplied"); } double min = coordBounds[0]; double max = coordBounds[1]; Extent<Double> cellBounds; if (min < max) { cellBounds = Extents.newExtent(min, max); } else { cellBounds = Extents.newExtent(max, min); } axisBounds.add(cellBounds); axisValues.add(axis.getCoordValue(i)); } return new DefinedBoundsAxis(name, axisValues, axisBounds, isLongitude); } else if (axis.isRegular()) { double aStart = axis.getStart(); int aSize = (int) axis.getSize(); double aInc = axis.getIncrement(); if (aSize == 1 && aInc == 0.0) { /* * This means that we have an axis with one single value. To * allow this to be visible on a map, we give it an arbitrary * size. 
*/ aInc = 0.1; aStart = axis.getCoordValue(0); } RegularAxisImpl ret = new RegularAxisImpl(name, aStart, aInc, aSize, isLongitude); return ret; } else { double[] primVals = axis.getCoordValues(); List<Double> valsList = CollectionUtils.listFromDoubleArray(primVals); return new ReferenceableAxisImpl(name, valsList, isLongitude); } } /** * Expands a glob expression to give a List of paths to files. This method * recursively searches directories, allowing for glob expressions like * {@code "c:\\data\\200[6-7]\\*\\1*\\A*.nc"}. * * @param globExpression * The expression to expand * @return a {@link List} of {@link File}s matching the given glob * expression */ public static List<File> expandGlobExpression(String globExpression) { /* * Check whether the glob expression represents an absolute path. * Relative paths may cause unpredictable and platform-dependent * behaviour so we give a warning */ File globFile = new File(globExpression); if (!globFile.isAbsolute()) { log.warn("Using relative path for a dataset. This may cause unpredictable or platform-dependent behaviour. The use of absolute paths is recommended"); } /* * Break glob pattern into path components. To do this in a reliable and * platform-independent way we use methods of the File class, rather * than String.split(). */ List<String> pathComponents = new ArrayList<String>(); while (globFile != null) { /* * We "pop off" the last component of the glob pattern and place it * in the first component of the pathComponents List. We therefore * ensure that the pathComponents end up in the right order. */ File parent = globFile.getParentFile(); /* * For a top-level directory, getName() returns an empty string, * hence we use getPath() in this case */ String pathComponent = parent == null ? 
globFile.getPath() : globFile.getName(); pathComponents.add(0, pathComponent); globFile = parent; } /* * We must have at least two path components: one directory and one * filename or glob expression */ List<File> searchPaths = new ArrayList<File>(); searchPaths.add(new File(pathComponents.get(0))); /* Index of the glob path component */ int i = 1; while (i < pathComponents.size()) { FilenameFilter globFilter = new GlobFilenameFilter(pathComponents.get(i)); List<File> newSearchPaths = new ArrayList<File>(); /* Look for matches in all the current search paths */ for (File dir : searchPaths) { if (dir.isDirectory()) { /* * Workaround for automounters that don't make filesystems * appear unless they're poked do a listing on * searchpath/pathcomponent whether or not it exists, then * discard the results */ new File(dir, pathComponents.get(i)).list(); for (File match : dir.listFiles(globFilter)) { newSearchPaths.add(match); } } } /* * Next time we'll search based on these new matches and will use * the next globComponent */ searchPaths = newSearchPaths; i++; } /* * Now we've done all our searching, we'll only retain the files from * the list of search paths */ List<File> files = new ArrayList<File>(); for (File path : searchPaths) { if (path.isFile()) files.add(path); } return files; } }
/**
 * Aviator custom function used as a tear-down hook.
 * <p>
 * Registered under the name returned by {@link #getName()} ({@code tdFunction}).
 * When invoked it logs the supplied argument along with the request/response
 * details held in the evaluation environment, then returns the constant
 * string {@code "defineResult"}.
 */
public static class TearDownHookFunction extends AbstractFunction {

    /**
     * Invoked by the Aviator engine with a single argument.
     *
     * @param env  the evaluation environment; read for the "$REQUEST" and
     *             "$RESPONSE" entries (may be absent, in which case null is logged)
     * @param type the single function argument; only used for logging
     * @return the literal {@link AviatorString} "defineResult"
     */
    @Override
    public AviatorObject call(Map<String, Object> env, AviatorObject type) {
        LogHelper.info("正在执行:{}方法,方法参数:{}", this.getName(), type.toString());
        LogHelper.info("当前请求参数详细信息:{}", env.get("$REQUEST"));
        LogHelper.info("当前响应参数详细信息:{}", env.get("$RESPONSE"));
        return new AviatorString("defineResult");
    }

    /**
     * @return the name this function is registered under in the Aviator engine
     */
    @Override
    public String getName() {
        return "tdFunction";
    }
}
/**
 * Registers one or more {@link SystemComponent}s as system requirements.
 * A {@code null} varargs array is silently ignored; additions are performed
 * while holding the collection's own lock so concurrent callers are safe.
 *
 * @param requirements the system component requirement(s) to add
 */
public void addSystemComponent(final SystemComponent... requirements) {
    if (requirements != null) {
        synchronized (systemComponents) {
            systemComponents.addAll(Arrays.asList(requirements));
        }
    }
}
/**
 * The basic implementation of the {@link CustomBetSelectionBuilder}
 */
public class CustomBetSelectionBuilderImpl implements CustomBetSelectionBuilder {
    // Builder state; all fields are reset by build() so the same builder
    // instance can be reused for subsequent selections.
    private URN eventId;
    private int marketId;
    private String outcomeId;
    private String specifiers;

    /** Sets the event id and returns this builder for chaining. */
    @Override
    public CustomBetSelectionBuilder setEventId(URN eventId) {
        this.eventId = eventId;
        return this;
    }

    /** Sets the market id and returns this builder for chaining. */
    @Override
    public CustomBetSelectionBuilder setMarketId(int marketId) {
        this.marketId = marketId;
        return this;
    }

    /** Sets the outcome id and returns this builder for chaining. */
    @Override
    public CustomBetSelectionBuilder setOutcomeId(String outcomeId) {
        this.outcomeId = outcomeId;
        return this;
    }

    /** Sets the specifiers and returns this builder for chaining. */
    @Override
    public CustomBetSelectionBuilder setSpecifiers(String specifiers) {
        this.specifiers = specifiers;
        return this;
    }

    /**
     * Builds a {@link Selection} from the current state, then clears all
     * fields so the builder can be reused.
     */
    @Override
    public Selection build() {
        Selection selection = new SelectionImpl(eventId, marketId, outcomeId, specifiers);
        eventId = null;
        marketId = 0;
        outcomeId = null;
        specifiers = null;
        return selection;
    }

    /**
     * Convenience overload that sets all values and builds in one call.
     * Note the parameter order is (specifiers, outcomeId) -- the reverse of
     * the field declaration order.
     */
    @Override
    public Selection build(URN eventId, int marketId, String specifiers, String outcomeId) {
        this.eventId = eventId;
        this.marketId = marketId;
        this.outcomeId = outcomeId;
        this.specifiers = specifiers;
        return build();
    }
}
class DataUrl:
    """
    Decodes a Base64 encoded data URL.

    Attributes:
        mime: the MIME type declared in the URL (e.g. ``"text/plain"``).
        content: the decoded payload as ``bytes``.

    Raises:
        ValueError: if ``url`` is not a base64 data URL (from ``__init__``).
        binascii.Error: if the payload is not valid base64.
    """

    # Matches e.g. "data:text/plain;base64,SGVsbG8=".
    # NOTE(review): '.' does not match newlines, so a payload containing an
    # embedded line break would be truncated at the break -- confirm inputs
    # are single-line.
    exp = re.compile(r"data:(?P<mime>[^;]+);base64,(?P<content>.*)")

    def __init__(self, url: Text):
        m = self.exp.match(url)
        if not m:
            # Include the offending value so callers can diagnose bad input.
            raise ValueError(f"not a valid base64 data URL: {url!r}")
        self.mime = m.group("mime")
        self.content = b64decode(m.group("content").encode())
// Grab a buffer from the available buffers or create a new buffer if none are available void FD3D12DefaultBufferPool::AllocDefaultResource(const D3D12_RESOURCE_DESC& Desc, FD3D12ResourceLocation& ResourceLocation, uint32 Alignment) { FD3D12Device* Device = GetParentDevice(); FD3D12Adapter* Adapter = Device->GetParentAdapter(); ResourceLocation.Clear(); if (Desc.Width == 0) return; const bool PoolResource = Desc.Width < Allocator->MaximumAllocationSizeForPooling && ((Desc.Width % (1024 * 64)) != 0); if (PoolResource) { check(Desc.Flags == Allocator->ResourceFlags); if (Allocator->TryAllocate(Desc.Width, Alignment, ResourceLocation)) { return; } } FD3D12Resource* NewResource = nullptr; VERIFYD3D12RESULT(Adapter->CreateBuffer(D3D12_HEAP_TYPE_DEFAULT, GetNodeMask(), GetVisibilityMask(), Desc.Width, &NewResource, Allocator->ResourceFlags)); SetName(NewResource, L"Stand Alone Default Buffer"); #if PIX_MEMORY_PROFILING && 0 { D3D12_RESOURCE_ALLOCATION_INFO Info = Device->GetDevice()->GetResourceAllocationInfo(0, 1, &Desc); PIXRecordMemoryAllocationEvent(ED3D12AllocatorID::DefaultBufferAllocatorFullResources, (void*)(NewResource->GetGPUVirtualAddress()), Info.SizeInBytes, 0); } #endif ResourceLocation.AsStandAlone(NewResource, Desc.Width); }
/**
 * Check that the project's gwt-user dependency is at least the gwt version
 * this plugin was built for, and log a warning if it is older.
 *
 * @throws MojoExecutionException
 *             if the plugin's version properties resource cannot be found
 *             or read
 */
private void checkGwtUserVersion() throws MojoExecutionException {
    InputStream inputStream = Thread.currentThread().getContextClassLoader()
        .getResourceAsStream( "org/codehaus/mojo/gwt/mojoGwtVersion.properties" );
    if ( inputStream == null )
    {
        // getResourceAsStream returns null (it does not throw) when the
        // resource is missing; without this guard properties.load() would
        // fail with a NullPointerException instead of a clear error.
        throw new MojoExecutionException( "Failed to load plugin properties" );
    }
    Properties properties = new Properties();
    try
    {
        properties.load( inputStream );
    }
    catch (IOException e)
    {
        throw new MojoExecutionException( "Failed to load plugin properties", e );
    }
    finally
    {
        IOUtils.closeQuietly( inputStream );
    }
    Artifact gwtUser = project.getArtifactMap().get( GWT_USER );
    if (gwtUser != null)
    {
        String mojoGwtVersion = properties.getProperty( "gwt.version" );
        // ArtifactVersion implements a version-aware comparison (not plain
        // string comparison).
        ArtifactVersion mojoGwtArtifactVersion = new DefaultArtifactVersion( mojoGwtVersion );
        ArtifactVersion userGwtArtifactVersion = new DefaultArtifactVersion( gwtUser.getVersion() );
        if ( userGwtArtifactVersion.compareTo( mojoGwtArtifactVersion ) < 0 )
        {
            getLog().warn( "Your project declares dependency on gwt-user " + gwtUser.getVersion()
                            + ". This plugin is designed for at least gwt version " + mojoGwtVersion );
        }
    }
}
Hillary Clinton: Aliens May Have Visited Us Already; Vows to Get to the Bottom of UFOs Jon Podhoretz said, correctly, that if any Republican candidate had said this, it would be screaming news all over every dial and website -- a perfect Otherizing story illustrating the strange, anti-scientific beliefs of Republicans. You remember how much play Ben Carson's speculations about pyramid granaries got. But Hillary Clinton says it, so it's just a story about a politician interested in citizen concerns. Hillary Clinton says that aliens may have already visited humanity. "I think we may have been [visited already]. We don't know for sure," the Democratic presidential front-runner told The Conway Daily Sun during a campaign stop in New Hampshire last week. Her comment came after being asked about her husband Bill Clinton's comments during an appearance on late-night show "Jimmy Kimmel Live" in 2014, when he suggested that extraterrestrial life could exist. ... Hillary Clinton told a Sun reporter that she would "get to the bottom" of UFOs. Whether extraterrestrial life exists, and whether extraterrestrial life has visited us, are two very, very different questions. Based on probabilities -- assuming that no godlike miracle is required for life, and further assuming that even if a godlike miracle is required for life, this miracle didn't occur only on earth -- it is extremely likely, bordering on certain, that extraterrestrial life exists, or has existed.
Based on the fact that there is absolutely no evidence of any visitations to earth by such life, apart from stray sightings of something in the sky (which are more easily explainable as meteorological effects), the answer to the question whether the earth has been visited is an "almost certainly not." Add into that the famous question asked by Fermi -- "If aliens exist, where are they?" -- i.e., why does evidence of them not abound, why are we not picking up radio signals from their past communications (and certainly they would have gone through a radio age, just as we did, even if they then moved on to something more clever) -- and the apparent impossibility of FTL travel and so forth. Add into that that the only explanation for why we don't know about alien visitations is a massive and vicious government conspiracy to keep us ignorant, which is a strange position to take for someone who was once, in her own telling, co-president of the US.
But you know -- They Love science. Just ask 'em. They'll tell you. Mrs. Clinton's future head of OSTP. @Coondawg68 @KevinNR pic.twitter.com/yg3QjQiJO7 — Starless (@starless941) January 4, 2016 Posted by: Ace at 12:34 PM MuNuvians MeeNuvians Polls! Polls! Polls! Frequently Asked Questions The (Almost) Complete Paul Anka Integrity Kick Top Top Tens Greatest Hitjobs News/Chat
Table-driven and context-sensitive collage languages In this paper, we introduce the notions of context-sensitive and ET0L collage grammars as generalizations of context-free collage grammars. Both kinds of picture-generating devices are more powerful than the context-free case. Nevertheless, the size of collages in an ET0L collage language can be shown to grow at most exponentially. In contrast to this, there are no such bounds for context-sensitive collage languages because suitable pictorial representations of recursively enumerable sets of strings can be generated. On the other hand, it is still a conjecture that ET0L collage languages exist that are not context-sensitive.
<filename>pkg/execution/plugins/atlassian/secretenvvar/kubecompute/podsecretenvvar_plugin_test.go
package kubecompute

import (
	"testing"

	smith_v1 "github.com/atlassian/smith/pkg/apis/smith/v1"
	smith_plugin "github.com/atlassian/smith/pkg/plugin"
	"github.com/atlassian/voyager/pkg/execution/plugins/atlassian/secretenvvar"
	plugin_testing "github.com/atlassian/voyager/pkg/execution/plugins/testing"
	sc_v1b1 "github.com/kubernetes-incubator/service-catalog/pkg/apis/servicecatalog/v1beta1"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	core_v1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/runtime"
)

const (
	defaultNamespace = "ns"
)

// testEnvVars runs the plugin with an empty rename map and no ignore regex.
func testEnvVars(t *testing.T, dependencies map[smith_v1.ResourceName]smith_plugin.Dependency, expectedResult map[string]string) {
	testEnvVarsFull(t, map[string]string{}, "", dependencies, expectedResult)
}

// testEnvVarsFull builds a PodSpec from the given options, runs the plugin's
// Process against the dependencies, and asserts the produced Secret contains
// exactly the expected key/value pairs.
func testEnvVarsFull(t *testing.T, renameEnvVar map[string]string, ignoreKeyRegex string, dependencies map[smith_v1.ResourceName]smith_plugin.Dependency, expectedResult map[string]string) {
	p, err := New()
	require.NoError(t, err)
	spec, err := runtime.DefaultUnstructuredConverter.ToUnstructured(&secretenvvar.PodSpec{
		RenameEnvVar:   renameEnvVar,
		IgnoreKeyRegex: ignoreKeyRegex,
	})
	require.NoError(t, err)
	context := &smith_plugin.Context{
		Namespace:    defaultNamespace,
		Dependencies: dependencies,
	}
	result, err := p.Process(spec, context)
	require.NoError(t, err)

	secret := result.Object.(*core_v1.Secret)
	for expectedKey, expectedVal := range expectedResult {
		actualVal, ok := secret.Data[expectedKey]
		require.True(t, ok, "missing output secret key: %q", expectedKey)
		assert.Equal(t, expectedVal, string(actualVal))
	}
	// No extra keys beyond the expected ones.
	assert.Equal(t, len(expectedResult), len(secret.Data))
}

// No dependencies should yield an empty output secret.
func TestNoDependencies(t *testing.T) {
	t.Parallel()
	testEnvVars(t, map[smith_v1.ResourceName]smith_plugin.Dependency{}, map[string]string{})
}

// Binding and plain-secret dependencies produce upper-cased, underscored keys.
func TestBasic(t *testing.T) {
	t.Parallel()
	input1 := map[string][]byte{
		"a-b-c": []byte("val1"),
	}
	input2 := map[string][]byte{
		"a-b-c": []byte("val2"),
	}
	expectedResult := map[string]string{
		"SECRET1_MYSECRET": "1",
		"SQS_QUEUE1_A_B_C": "val1",
		"SQS_QUEUE2_A_B_C": "val2",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", input1),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", input2),
		"z": plugin_testing.ConstructSecretDependency("secret1", defaultNamespace, map[string][]byte{"MYSECRET": []byte("1")}),
	}
	testEnvVars(t, dependencies, expectedResult)
}

// The literal "0DASH0" marker in keys is converted like a dash.
func TestDashReplacement(t *testing.T) {
	t.Parallel()
	input1 := map[string][]byte{
		"a0DASH0b0DASH0c": []byte("val1"),
	}
	input2 := map[string][]byte{
		"a-b0DASH0c": []byte("val2"),
	}
	expectedResult := map[string]string{
		"SQS_QUEUE1_A_B_C": "val1",
		"SQS_QUEUE2_A_B_C": "val2",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", input1),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", input2),
	}
	testEnvVars(t, dependencies, expectedResult)
}

// envResourcePrefix annotations override the default prefix; an annotation on
// the binding ("OTHERSQS" on y) takes precedence over the instance's.
func TestAnnotationPrefixes(t *testing.T) {
	t.Parallel()
	input1 := map[string][]byte{
		"a-b-c": []byte("val1"),
	}
	input2 := map[string][]byte{
		"a-b-c": []byte("val2"),
	}
	expectedResult := map[string]string{
		"MYSQS_QUEUE1_A_B_C":    "val1",
		"OTHERSQS_QUEUE2_A_B_C": "val2",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", input1),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", input2),
	}
	dependencies["x"].Auxiliary[0].(*sc_v1b1.ServiceInstance).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "MYSQS",
	}
	dependencies["y"].Auxiliary[0].(*sc_v1b1.ServiceInstance).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "MYSQS",
	}
	dependencies["y"].Actual.(*sc_v1b1.ServiceBinding).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "OTHERSQS",
	}
	testEnvVars(t, dependencies, expectedResult)
}

// Keys matching IgnoreKeyRegex are dropped from the output secret.
func TestIgnoreKeyRegex(t *testing.T) {
	t.Parallel()
	input1 := map[string][]byte{
		"a-b-c": []byte("val1"),
	}
	input2 := map[string][]byte{
		"a-b-c": []byte("val2"),
	}
	expectedResult := map[string]string{
		"SQS_QUEUE1_A_B_C": "val1",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", input1),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", input2),
		"z": plugin_testing.ConstructSecretDependency("secret1", defaultNamespace, map[string][]byte{"MYSECRET": []byte("1")}),
	}
	testEnvVarsFull(t, map[string]string{}, "^S(ECRET1|QS_.*2)", dependencies, expectedResult)
}

// RenameEnvVar remaps output keys; here two keys are swapped with each other.
func TestRenameEnvVars(t *testing.T) {
	t.Parallel()
	input1 := map[string][]byte{
		"a-b-c": []byte("val1"),
	}
	input2 := map[string][]byte{
		"a-b-c": []byte("val2"),
	}
	expectedResult := map[string]string{
		"SECRET1_MYSECRET": "val1",
		"SQS_QUEUE1_A_B_C": "1",
		"SQS_QUEUE2_A_B_C": "val2",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", input1),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", input2),
		"z": plugin_testing.ConstructSecretDependency("secret1", defaultNamespace, map[string][]byte{"MYSECRET": []byte("1")}),
	}
	testEnvVarsFull(t, map[string]string{
		"SQS_QUEUE1_A_B_C": "SECRET1_MYSECRET",
		"SECRET1_MYSECRET": "SQS_QUEUE1_A_B_C",
	}, "", dependencies, expectedResult)
}

// The "ASAPKey" instance prefix is rendered as "ASAP" in the output keys.
func TestRenameAsapKey(t *testing.T) {
	t.Parallel()
	asapCredentials := map[string][]byte{
		"AUDIENCE":    []byte("audience"),
		"ISSUER":      []byte("issuer"),
		"KEY_ID":      []byte("keyId"),
		"PRIVATE_KEY": []byte("privateKey"),
	}
	expectedResult := map[string]string{
		"ASAP_AUDIENCE":    "audience",
		"ASAP_ISSUER":      "issuer",
		"ASAP_KEY_ID":      "keyId",
		"ASAP_PRIVATE_KEY": "privateKey",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency(
			"asap-binding", defaultNamespace, "asap-secret", "myasap", "asap", asapCredentials),
	}
	dependencies["x"].Auxiliary[0].(*sc_v1b1.ServiceInstance).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "ASAPKey",
	}
	testEnvVarsFull(t, map[string]string{}, "", dependencies, expectedResult)
}
//! Contains offspring selection algorithms. use crate::construction::heuristics::InsertionContext; use crate::solver::RefinementContext; mod naive_selection; pub use self::naive_selection::NaiveSelection; /// A trait which specifies evolution selection behavior. pub trait Selection { /// Selects parent from population based on refinement process state. fn select_parents(&self, refinement_ctx: &RefinementContext) -> Vec<InsertionContext>; }
/**
 * Method for building and signing packets.
 * <p>
 * Builds the APDU header + data, signs that byte sequence with
 * SHA256withECDSA, and appends (signature length as short, signature bytes)
 * after the data in the resulting packet.
 *
 * @param function (byte) protocol function
 * @param hostPrivKey (PrivateKey) host's private key object
 * @param p1 (byte) the first parameter
 * @param p2 (byte) the second parameter
 * @param data (byte[]) packet data
 * @return CommandAPDU packet with signature
 * @throws IllegalArgumentException when header + data + signature headroom exceeds 256 bytes
 * @throws Exception when signing fails
 */
private CommandAPDU GenAndSignPacket(byte function, PrivateKey hostPrivKey, byte p1, byte p2, byte[] data) throws Exception {
    // 5 = APDU header bytes below; 72 is the signature headroom.
    // NOTE(review): 72 presumably covers the maximum DER-encoded ECDSA
    // signature size for the curve in use -- confirm against the card spec.
    if ((5 + data.length + 72) > 256) {
        throw new IllegalArgumentException("Packet data length is too long.");
    }

    // Reconstruct the exact header+data byte sequence that will be signed.
    byte[] packetCopy = new byte[5 + data.length];
    packetCopy[0] = Consts.CLA_MPC;
    packetCopy[1] = function;
    packetCopy[2] = p1;
    packetCopy[3] = p2;
    packetCopy[4] = (byte) (data.length);
    System.arraycopy(data, 0, packetCopy, 5, data.length);

    Signature ecdsaSign = Signature.getInstance("SHA256withECDSA");
    ecdsaSign.initSign(hostPrivKey);
    ecdsaSign.update(packetCopy);
    byte[] signature = ecdsaSign.sign();

    // Payload layout: data || sigLength (2 bytes) || signature.
    byte[] packetDataWSignature = Util.concat(data, Util.concat(Util.shortToByteArray(signature.length), signature));
    return new CommandAPDU(Consts.CLA_MPC, function, p1, p2, packetDataWSignature);
}
package com.outlook.bigkun.concepts;

import java.util.Hashtable;

/**
 * Factory that caches and shares {@link Flyweight} instances by key.
 *
 * @author zhanghk
 * @since 2019/8/2
 */
public class FlyweightFactory {
    // Cache of shared flyweights keyed by their intrinsic-state key.
    // Hashtable synchronizes each call; computeIfAbsent additionally makes
    // the lookup-or-create step atomic (the original get/put pair could
    // create duplicate flyweights under concurrent access).
    private final Hashtable<Object, Flyweight> flyweights = new Hashtable<>();

    /**
     * Returns the flyweight for {@code key}, creating and caching one on
     * first use.
     *
     * @param key lookup key; its {@code toString()} value becomes the
     *            intrinsic state of a newly created flyweight
     * @return the shared {@link Flyweight} instance for the key
     */
    public Flyweight getFlyweight(Object key) {
        return flyweights.computeIfAbsent(key,
                k -> new ConcreteFlyweight(new ConcreteIntrinsicState(k.toString())));
    }
}
import sys


def solve(n):
    """Return a sorted list of distinct integers from 1..k whose sum is exactly n.

    Greedily accumulates 1, 2, 3, ... until the running total reaches or
    exceeds n, then drops the single element equal to the overshoot
    (total - n), which is guaranteed to be one of the accumulated values.

    Args:
        n: positive integer target sum.

    Returns:
        Sorted list of distinct positive integers summing to n.
    """
    total = 0
    chosen = set()
    for i in range(1, n + 1):
        total += i
        chosen.add(i)
        if total >= n:
            break
    if total != n:
        # Overshoot is in 1..i-1 (the total before adding i was < n),
        # so it is always present in the set; removing it makes the sum exact.
        chosen.discard(total - n)
    return sorted(chosen)


def main():
    # Read the target from stdin and print one chosen integer per line.
    n = int(sys.stdin.readline())
    for value in solve(n):
        print(value)


if __name__ == "__main__":
    main()
Normative Aesthetics: The Ideal Audience and Art Education Reader-Response Criticism proposes a new way of looking at literary text. One of the writers of this criticism discusses 'literary competence', which entails the idea of the 'ideal reader'. The writer proposes the idea of the 'ideal audience' to work with fields of art other than literature. In an attempt to give the proper appreciation of art, norms are required, and norms in art can be arrived at using the concept of the 'ideal audience'. It is a fact that norms are normally contextual, not universal, and change according to the change in the concept of art creation. Then the question is why take the trouble of forming norms if they keep changing?
Several dozen professors in Harvard University’s Faculty of Arts and Sciences have signed a letter to their dean asking for formal oversight of the massive open online courses offered by Harvard through edX, a MOOC provider co-founded by the university. While “some faculty are tremendously excited about HarvardX,” the professors wrote, referring to the university’s brand within the edX platform, “other[s] are deeply concerned about the program’s cost and consequences.” The letter, published on Thursday in The Harvard Crimson, the student newspaper, was signed by 58 professors in the university division, which is known as the FAS. The authors go on to ask Michael D. Smith, dean of the FAS, to appoint a committee of arts and sciences faculty members “to draft a set of ethical and educational principles” that would govern their colleagues’ involvement in Harvard-branded MOOCs. The letter comes several weeks after the philosophy department at San Jose State University wrote an open letter to Michael Sandel, a government professor at Harvard, expressing concerns about how edX’s plans to license its MOOCs to cash-strapped colleges like San Jose State might have devastating consequences for professors at those colleges. That letter was on the minds of Harvard’s FAS professors when they convened to discuss MOOCs at a meeting this month, said Peter J. Burgard, a professor of German at Harvard. In their letter to Dean Smith, the Harvard professors allude to “many critical questions,” as yet unanswered, about “the impact online courses will have on the higher-education system as a whole.” But, perhaps more immediately, the professors were irked that Harvard had become so deeply involved in MOOCs before consulting with them, said Mr. Burgard. “It was presented to us as a fait accompli in the fall, and the first time we had a chance to ask questions about it was in the winter,” he said. 
This spring, as MOOC skeptics have grown more vocal, “there’s a bit of a groundswell of people thinking now more seriously about it,” Mr. Burgard said. But the 58 signatories of the letter, out of the hundreds of professors in the FAS, might not get their way. In a written statement to The Chronicle, a spokesman for the dean suggested that a new committee, consisting solely of FAS professors, was not in the cards. “The dean will continue to work with the two existing HarvardX faculty committees—on both of which the FAS has the largest faculty representation of any Harvard school—and with faculty members across the FAS to support innovation in teaching and learning and to promote ongoing dialogue and debate of these important issues,” said Jeff Neal, the dean’s director of communications. “Dean Smith wants to ensure that every individual member of the faculty continues to have the academic freedom to structure their courses and their pedagogy as they deem appropriate, and the institutional support those efforts require,” Mr. Neal continued. “HarvardX is a university initiative that supports faculty innovation in online and blended models of teaching. Ultimately, HarvardX consists of the faculty members—from the FAS and across the university—who have chosen to undertake these innovative efforts.”
<gh_stars>0 // WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!! #[cfg(any(feature = "all", feature = "android-os-Environment"))] __jni_bindgen! { /// public class [Environment](https://developer.android.com/reference/android/os/Environment.html) /// /// Required feature: android-os-Environment public class Environment ("android/os/Environment") extends crate::java::lang::Object { /// [Environment](https://developer.android.com/reference/android/os/Environment.html#Environment()) pub fn new<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::os::Environment>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC, .name == "<init>", .descriptor == "()V" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_method("android/os/Environment\0", "<init>\0", "()V\0"); __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [getRootDirectory](https://developer.android.com/reference/android/os/Environment.html#getRootDirectory()) /// /// Required features: "java-io-File" #[cfg(any(feature = "all", all(feature = "java-io-File")))] pub fn getRootDirectory<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::io::File>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getRootDirectory", .descriptor == "()Ljava/io/File;" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getRootDirectory\0", "()Ljava/io/File;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// 
[getDataDirectory](https://developer.android.com/reference/android/os/Environment.html#getDataDirectory()) /// /// Required features: "java-io-File" #[cfg(any(feature = "all", all(feature = "java-io-File")))] pub fn getDataDirectory<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::io::File>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getDataDirectory", .descriptor == "()Ljava/io/File;" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getDataDirectory\0", "()Ljava/io/File;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [getExternalStorageDirectory](https://developer.android.com/reference/android/os/Environment.html#getExternalStorageDirectory()) /// /// Required features: "java-io-File" #[cfg(any(feature = "all", all(feature = "java-io-File")))] #[deprecated] pub fn getExternalStorageDirectory<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::io::File>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getExternalStorageDirectory", .descriptor == "()Ljava/io/File;" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getExternalStorageDirectory\0", "()Ljava/io/File;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [getExternalStoragePublicDirectory](https://developer.android.com/reference/android/os/Environment.html#getExternalStoragePublicDirectory(java.lang.String)) /// /// Required features: 
"java-io-File", "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-io-File", feature = "java-lang-String")))] #[deprecated] pub fn getExternalStoragePublicDirectory<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::io::File>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getExternalStoragePublicDirectory", .descriptor == "(Ljava/lang/String;)Ljava/io/File;" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getExternalStoragePublicDirectory\0", "(Ljava/lang/String;)Ljava/io/File;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [getDownloadCacheDirectory](https://developer.android.com/reference/android/os/Environment.html#getDownloadCacheDirectory()) /// /// Required features: "java-io-File" #[cfg(any(feature = "all", all(feature = "java-io-File")))] pub fn getDownloadCacheDirectory<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::io::File>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getDownloadCacheDirectory", .descriptor == "()Ljava/io/File;" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getDownloadCacheDirectory\0", "()Ljava/io/File;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// 
[getExternalStorageState](https://developer.android.com/reference/android/os/Environment.html#getExternalStorageState()) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn getExternalStorageState<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getExternalStorageState", .descriptor == "()Ljava/lang/String;" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getExternalStorageState\0", "()Ljava/lang/String;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [getStorageState](https://developer.android.com/reference/android/os/Environment.html#getStorageState(java.io.File)) /// /// Required features: "java-io-File", "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-io-File", feature = "java-lang-String")))] #[deprecated] pub fn getStorageState<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::io::File>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getStorageState", .descriptor == "(Ljava/io/File;)Ljava/lang/String;" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getStorageState\0", "(Ljava/io/File;)Ljava/lang/String;\0"); 
__jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [getExternalStorageState](https://developer.android.com/reference/android/os/Environment.html#getExternalStorageState(java.io.File)) /// /// Required features: "java-io-File", "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-io-File", feature = "java-lang-String")))] pub fn getExternalStorageState_File<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::io::File>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "getExternalStorageState", .descriptor == "(Ljava/io/File;)Ljava/lang/String;" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "getExternalStorageState\0", "(Ljava/io/File;)Ljava/lang/String;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [isExternalStorageRemovable](https://developer.android.com/reference/android/os/Environment.html#isExternalStorageRemovable()) pub fn isExternalStorageRemovable<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "isExternalStorageRemovable", .descriptor == "()Z" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "isExternalStorageRemovable\0", "()Z\0"); __jni_env.call_static_boolean_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// 
[isExternalStorageRemovable](https://developer.android.com/reference/android/os/Environment.html#isExternalStorageRemovable(java.io.File)) /// /// Required features: "java-io-File" #[cfg(any(feature = "all", all(feature = "java-io-File")))] pub fn isExternalStorageRemovable_File<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::io::File>>) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "isExternalStorageRemovable", .descriptor == "(Ljava/io/File;)Z" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "isExternalStorageRemovable\0", "(Ljava/io/File;)Z\0"); __jni_env.call_static_boolean_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [isExternalStorageEmulated](https://developer.android.com/reference/android/os/Environment.html#isExternalStorageEmulated()) pub fn isExternalStorageEmulated<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "isExternalStorageEmulated", .descriptor == "()Z" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "isExternalStorageEmulated\0", "()Z\0"); __jni_env.call_static_boolean_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [isExternalStorageEmulated](https://developer.android.com/reference/android/os/Environment.html#isExternalStorageEmulated(java.io.File)) /// /// Required features: "java-io-File" #[cfg(any(feature = "all", all(feature = "java-io-File")))] pub fn 
isExternalStorageEmulated_File<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::io::File>>) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "isExternalStorageEmulated", .descriptor == "(Ljava/io/File;)Z" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "isExternalStorageEmulated\0", "(Ljava/io/File;)Z\0"); __jni_env.call_static_boolean_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [isExternalStorageLegacy](https://developer.android.com/reference/android/os/Environment.html#isExternalStorageLegacy()) pub fn isExternalStorageLegacy<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "isExternalStorageLegacy", .descriptor == "()Z" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "isExternalStorageLegacy\0", "()Z\0"); __jni_env.call_static_boolean_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [isExternalStorageLegacy](https://developer.android.com/reference/android/os/Environment.html#isExternalStorageLegacy(java.io.File)) /// /// Required features: "java-io-File" #[cfg(any(feature = "all", all(feature = "java-io-File")))] pub fn isExternalStorageLegacy_File<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::io::File>>) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // 
class.path == "android/os/Environment", java.flags == PUBLIC | STATIC, .name == "isExternalStorageLegacy", .descriptor == "(Ljava/io/File;)Z" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/os/Environment\0", "isExternalStorageLegacy\0", "(Ljava/io/File;)Z\0"); __jni_env.call_static_boolean_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// **get** public static [DIRECTORY_ALARMS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_ALARMS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_ALARMS<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_ALARMS\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_ALARMS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_ALARMS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_ALARMS<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_ALARMS\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_AUDIOBOOKS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_AUDIOBOOKS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_AUDIOBOOKS<'env>(env: &'env __jni_bindgen::Env) -> 
__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_AUDIOBOOKS\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_AUDIOBOOKS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_AUDIOBOOKS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_AUDIOBOOKS<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_AUDIOBOOKS\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_DCIM](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_DCIM) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_DCIM<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_DCIM\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_DCIM](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_DCIM) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_DCIM<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_DCIM\0", 
"Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_DOCUMENTS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_DOCUMENTS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_DOCUMENTS<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_DOCUMENTS\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_DOCUMENTS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_DOCUMENTS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_DOCUMENTS<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_DOCUMENTS\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_DOWNLOADS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_DOWNLOADS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_DOWNLOADS<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_DOWNLOADS\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static 
[DIRECTORY_DOWNLOADS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_DOWNLOADS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_DOWNLOADS<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_DOWNLOADS\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_MOVIES](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_MOVIES) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_MOVIES<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_MOVIES\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_MOVIES](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_MOVIES) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_MOVIES<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_MOVIES\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_MUSIC](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_MUSIC) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = 
"java-lang-String"))] pub fn DIRECTORY_MUSIC<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_MUSIC\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_MUSIC](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_MUSIC) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_MUSIC<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_MUSIC\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_NOTIFICATIONS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_NOTIFICATIONS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_NOTIFICATIONS<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_NOTIFICATIONS\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_NOTIFICATIONS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_NOTIFICATIONS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_NOTIFICATIONS<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj 
crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_NOTIFICATIONS\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_PICTURES](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_PICTURES) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_PICTURES<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_PICTURES\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) } } /// **set** public static [DIRECTORY_PICTURES](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_PICTURES) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn set_DIRECTORY_PICTURES<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_PICTURES\0", "Ljava/lang/String;\0"); env.set_static_object_field(class, field, value) } } /// **get** public static [DIRECTORY_PODCASTS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_PODCASTS) /// /// Required feature: java-lang-String #[cfg(any(feature = "all", feature = "java-lang-String"))] pub fn DIRECTORY_PODCASTS<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> { unsafe { let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_PODCASTS\0", "Ljava/lang/String;\0"); env.get_static_object_field(class, field) 
} }

    /// **set** public static [DIRECTORY_PODCASTS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_PODCASTS)
    ///
    /// Required feature: java-lang-String
    #[cfg(any(feature = "all", feature = "java-lang-String"))]
    pub fn set_DIRECTORY_PODCASTS<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) {
        unsafe {
            let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_PODCASTS\0", "Ljava/lang/String;\0");
            env.set_static_object_field(class, field, value)
        }
    }

    /// **get** public static [DIRECTORY_RINGTONES](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_RINGTONES)
    ///
    /// Required feature: java-lang-String
    #[cfg(any(feature = "all", feature = "java-lang-String"))]
    pub fn DIRECTORY_RINGTONES<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> {
        unsafe {
            let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_RINGTONES\0", "Ljava/lang/String;\0");
            env.get_static_object_field(class, field)
        }
    }

    /// **set** public static [DIRECTORY_RINGTONES](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_RINGTONES)
    ///
    /// Required feature: java-lang-String
    #[cfg(any(feature = "all", feature = "java-lang-String"))]
    pub fn set_DIRECTORY_RINGTONES<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) {
        unsafe {
            let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_RINGTONES\0", "Ljava/lang/String;\0");
            env.set_static_object_field(class, field, value)
        }
    }

    /// **get** public static [DIRECTORY_SCREENSHOTS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_SCREENSHOTS)
    ///
    /// Required feature: java-lang-String
    #[cfg(any(feature = "all", feature = "java-lang-String"))]
    pub fn DIRECTORY_SCREENSHOTS<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>> {
        unsafe {
            let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_SCREENSHOTS\0", "Ljava/lang/String;\0");
            env.get_static_object_field(class, field)
        }
    }

    /// **set** public static [DIRECTORY_SCREENSHOTS](https://developer.android.com/reference/android/os/Environment.html#DIRECTORY_SCREENSHOTS)
    ///
    /// Required feature: java-lang-String
    #[cfg(any(feature = "all", feature = "java-lang-String"))]
    pub fn set_DIRECTORY_SCREENSHOTS<'env, 'obj>(env: &'env __jni_bindgen::Env, value: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'obj crate::java::lang::String>>) {
        unsafe {
            let (class, field) = env.require_class_static_field("android/os/Environment\0", "DIRECTORY_SCREENSHOTS\0", "Ljava/lang/String;\0");
            env.set_static_object_field(class, field, value)
        }
    }

    /// public static final [MEDIA_BAD_REMOVAL](https://developer.android.com/reference/android/os/Environment.html#MEDIA_BAD_REMOVAL)
    pub const MEDIA_BAD_REMOVAL : &'static str = "bad_removal";

    /// public static final [MEDIA_CHECKING](https://developer.android.com/reference/android/os/Environment.html#MEDIA_CHECKING)
    pub const MEDIA_CHECKING : &'static str = "checking";

    /// public static final [MEDIA_EJECTING](https://developer.android.com/reference/android/os/Environment.html#MEDIA_EJECTING)
    pub const MEDIA_EJECTING : &'static str = "ejecting";

    /// public static final [MEDIA_MOUNTED](https://developer.android.com/reference/android/os/Environment.html#MEDIA_MOUNTED)
    pub const MEDIA_MOUNTED : &'static str = "mounted";

    /// public static final [MEDIA_MOUNTED_READ_ONLY](https://developer.android.com/reference/android/os/Environment.html#MEDIA_MOUNTED_READ_ONLY)
    pub const MEDIA_MOUNTED_READ_ONLY : &'static str = "mounted_ro";

    /// public static final [MEDIA_NOFS](https://developer.android.com/reference/android/os/Environment.html#MEDIA_NOFS)
    pub const MEDIA_NOFS : &'static str = "nofs";

    /// public static final [MEDIA_REMOVED](https://developer.android.com/reference/android/os/Environment.html#MEDIA_REMOVED)
    pub const MEDIA_REMOVED : &'static str = "removed";

    /// public static final [MEDIA_SHARED](https://developer.android.com/reference/android/os/Environment.html#MEDIA_SHARED)
    pub const MEDIA_SHARED : &'static str = "shared";

    /// public static final [MEDIA_UNKNOWN](https://developer.android.com/reference/android/os/Environment.html#MEDIA_UNKNOWN)
    pub const MEDIA_UNKNOWN : &'static str = "unknown";

    /// public static final [MEDIA_UNMOUNTABLE](https://developer.android.com/reference/android/os/Environment.html#MEDIA_UNMOUNTABLE)
    pub const MEDIA_UNMOUNTABLE : &'static str = "unmountable";

    /// public static final [MEDIA_UNMOUNTED](https://developer.android.com/reference/android/os/Environment.html#MEDIA_UNMOUNTED)
    pub const MEDIA_UNMOUNTED : &'static str = "unmounted";
} }
Immunohistochemical localization of steroidogenic enzymes in corpus luteum of wild sika deer during early mating season. We analyzed the localization of steroidogenic enzymes (P450 scc, 3 beta HSD, P450 arom and P450 c17) in the corpora lutea of two Hokkaido sika deer (Cervus nippon yesoensis) during the early mating season. Two corpora lutea were found in each female and the timing of formation of the corpora lutea seemed different. P450 scc- and 3 beta HSD-positive luteal cells were found in both corpora lutea. The existence of two functional corpora lutea from the early mating season through pregnancy suggests that progesterone secreted by two or more corpora lutea is necessary for maintenance of pregnancy in sika deer.
def _variable(self):
    """Parse the current token as a variable reference.

    Expects the current token to be of type 'SYMBOL': consumes it via
    self._advance() and returns its value wrapped in a Variable node.
    For any other token type, reports the mismatch through
    self._error('SYMBOL') (which presumably raises a parse error; if it
    returns normally this method yields None -- TODO confirm).
    """
    if self.token.type == 'SYMBOL':
        symbol_name = self.token.value
        self._advance()
        return Variable(symbol_name)
    self._error('SYMBOL')
"Heaven has no rage like love to hatred turned, nor hell a fury like a woman scorned," playwright William Congreve wrote. In a suburb of Chicago, fury has overtaken a jilted bride who is suing her former fiancé for the wedding costs. Dominique Buttitta, dumped four days before the wedding was to take place, is seeking damages of $95,942 from Vito Salerno to cover wedding expenses and the cost of the lawsuit. According to the case filed on Friday in Cook County Circuit Court, Buttitta, an attorney, claimed breach of promise to marry and intentional infliction of emotional distress. Buttitta and Salerno began dating in March 2007, were engaged in December 2007 and the wedding was set for October 2 of this year in Barrington, Illinois. Buttitta and Salerno did not return requests for comment. The case claims Salerno told others the wedding was cancelled but denied saying so when Buttitta confronted him on Sept. 25. According to the suit, he called off the wedding two days later. The suit's itemized list of expenses included over $30,000 for the banquet hall, $11,000 on lighting and flowers, $10,000 for an orchestra, $7,550 for a photographer, $5,000 for a wedding dress and accessories, and $1,700 for wedding favors. The expenses include other non-refundable purchases, including a bridesmaid luncheon, bridal shower and a deposit for a wedding planner. The suit also claims that one month before the wedding date, the groom attended a bachelor party at an adult entertainment business called the Pink Monkey. He allegedly engaged in lewd acts, including lap dances with strippers, of which the bride was unaware. John Zielinski, a civil attorney not connected with the case, said because Buttitta is suing to recover damages from documented expenses, she has a chance. Some states, including Illinois and Georgia, have "breach of promise" to marry laws. Zielinski said it would be more difficult to win monetary damages just for emotional distress. 
"Illinois has limited recovery to actual damages so you can't complain about extra damages," he said. This is not the first time a heartbroken bride sued her former groom. In July 2008, a jury in Georgia ordered Wayne Gibbs to pay RoseMary Shell $150,000 for breaking off their engagement three days before their wedding. Shell had moved from Florida to Georgia to be with her ex-fiancé and in doing so took a pay cut.
Lionsgate’s “Divergent” has opened impressively with $4.9 million at late-night shows on Thursday night in the U.S. “We’re off to a great start with strong numbers from all regions of the country, urban, suburban and rural alike,” said Lionsgate CEO Jon Feltheimer. “We’re confident that ‘Divergent’ is on its way to becoming another important franchise for us, and we have just greenlit the second film, ‘Insurgent.’” Tracking for the futuristic adventure, starring Shailene Woodley and Theo James (pictured above), has been in the $60 million plus range for its opening weekend. By comparison, Paramount’s “World War Z” generated $3.6 million from late-night shows on its way to a $66 million opening weekend last June. Universal’s “Despicable Me 2” generated $4.7 million at late-nights in July to start an $83 million opening weekend. The projected U.S. start for “Divergent” is similar to Summit’s first “Twilight” film, which opened to $69 million in 2008 but well below the $152 million start for Lionsgate’s “The Hunger Games” in 2012. “Divergent,” directed by Neil Burger, opens at 3,936 locations including 346 Imax screens. The film is based on the first book in Veronica Roth’s trilogy. Woodley plays a 16-year-old in a tightly-controlled dystopia who does not fit into any of society’s five factions. “Insurgent” will begin shooting in May in Atlanta with “Red” director Robert Schwentke helming. Lionsgate has set a March 20, 2015, release date for “Insurgent” and a March 18, 2016, date for “Allegiant,” based on the final book. Disney is also opening “Muppets Most Wanted” this weekend with tracking indicating an opening in the $25 million range.
/* $Header: /Users/ikriest/CVS/mops/external_forcing_mops_biogeochem.c,v 1.1.1.1 2015/06/03 17:02:09 ikriest Exp $ */ /* $Name: mops-1_2 $*/ #define MIN(x, y) (((x) < (y)) ? (x) : (y)) #define MAX(x, y) (((x) > (y)) ? (x) : (y)) #include <stdio.h> #include <stdlib.h> #include <string.h> #undef READ_SWRAD #include "petscmat.h" #include "petsc_matvec_utils.h" #include "tmm_timer.h" #include "tmm_forcing_utils.h" #include "tmm_profile_utils.h" #include "tmm_profile_data.h" #include "tmm_misfit.h" #include "tmm_main.h" #include "mops_biogeochem_tmm.h" /* Macros to map tracer names to vectors */ /* Note: for MOPS, we have the following tracer assignement:*/ /* v[0] PO4; v[1] DOP; v[2] O2; v[3] Phy; v[4] Zoo; v[5] Det; v[6] NO3 */ /* Additionally, or option -DCARBON: v[7] DIC; v[8] Alk */ /* Note that this also affects BGC_PARAMS.h and the runsccript(s) */ #define TR v[0] #define DIC v[7] #define ALK v[8] #define localDIC localTR[7] #define localALK localTR[8] Vec Ts,Ss; PetscScalar *localTs,*localSs; PetscScalar **localTR, **localJTR; #ifdef CARBON PetscScalar *localph; PetscBool useVirtualFlux = PETSC_FALSE; PetscScalar *localEmP; PeriodicArray localEmPp; Vec surfVolFrac; #endif PetscScalar *localwind,*localfice,*localdz,*localatmosp; PetscScalar *localswrad, *localtau; #ifndef READ_SWRAD PetscScalar *locallatitude; #endif PetscBool useSeparateBiogeochemTimeStepping = PETSC_FALSE; PetscInt numBiogeochemStepsPerOceanStep = 1; PetscInt nzmax,nzeuph; PetscScalar DeltaT,TheoDeltaT; PetscScalar *drF; PeriodicVec Tsp, Ssp; PeriodicArray localwindp,localficep,localatmospp; #ifdef READ_SWRAD PeriodicArray localswradp; #endif PetscInt numBiogeochemPeriods; PetscScalar *tdpBiogeochem; /* arrays for periodic forcing */ PetscBool periodicBiogeochemForcing = PETSC_FALSE; PetscScalar biogeochemCyclePeriod, biogeochemCycleStep; PetscInt toModel = 1; PetscInt fromModel = 2; PetscBool readBGCParams = PETSC_FALSE; PetscInt numBGCParams = 0; char bgcParamsFile[PETSC_MAX_PATH_LEN]; 
PetscScalar *bgcparams; PetscScalar *localdA; PetscInt maxValsToRead; #ifdef CARBON /* atmospheric model variables */ char *pCO2atmFiles[2]; PetscInt numpCO2atm_hist = 0; PetscScalar *TpCO2atm_hist, *pCO2atm_hist; PetscBool fixedAtmosCO2 = PETSC_TRUE; char pCO2atmIniFile[PETSC_MAX_PATH_LEN]; PetscBool useAtmModel = PETSC_FALSE; PetscScalar pCO2atm_ini = 280.0; /* default initial value */ PetscScalar pCO2atm = 280.0; /* default initial value */ PetscScalar ppmToPgC=2.1324; PetscScalar atmModelDeltaT; PetscScalar secPerYear=86400.0*360.0; PetscScalar Focean=0.0; PetscScalar localFocean=0.0; PetscScalar Foceanint = 0.0; PetscInt atmModelUpdateTimeSteps=1; PetscInt atmWriteSteps; PetscBool atmAppendOutput; FILE *atmfptime; char atmOutTimeFile[PETSC_MAX_PATH_LEN]; #endif PetscScalar runoff_ini = 0.0; PetscScalar GRunoff; /* Global runoff, calculated from burial */ PetscScalar *localrunoffvol; /* volume supplied by runoff */ PetscScalar localFburial = 0.0; PetscScalar Fburial=0.0; PetscInt burialSumSteps; char runoffOutTimeFile[PETSC_MAX_PATH_LEN]; char runoffIniFile[PETSC_MAX_PATH_LEN]; FILE *runofffptime; PetscScalar totalA = 0.0; PetscBool calcDiagnostics = PETSC_FALSE; PetscInt diagNumTimeSteps, diagStartTimeStep, diagCount; PetscBool appendDiagnostics = PETSC_FALSE; /* Add model specific diagnostic variables below */ Vec fbgc1, fbgc2, fbgc3, fbgc4, fbgc5, fbgc6, fbgc7, fbgc1avg, fbgc2avg, fbgc3avg, fbgc4avg, fbgc5avg, fbgc6avg, fbgc7avg; PetscViewer fdfbgc1avg, fdfbgc2avg, fdfbgc3avg, fdfbgc4avg, fdfbgc5avg, fdfbgc6avg, fdfbgc7avg; PetscScalar *localfbgc1, *localfbgc2, *localfbgc3, *localfbgc4, *localfbgc5, *localfbgc6, *localfbgc7; #ifdef CARBON PetscScalar *localco2airseafluxdiag, *localco2airseafluxdiagavg; #endif PetscBool MYTRUE = PETSC_TRUE, MYFALSE = PETSC_FALSE; #if defined (FORSPINUP) || defined (FORJACOBIAN) PetscScalar relaxTau[50], relaxLambda[50], relaxValue[50]; PetscBool relaxTracer = PETSC_FALSE; #endif /* 
-----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ #undef __FUNCT__ #define __FUNCT__ "iniExternalForcing" PetscErrorCode iniExternalForcing(PetscScalar tc, PetscInt Iter, PetscInt numTracers, Vec *v, Vec *ut) { PetscErrorCode ierr; PetscInt ip, kl, nzloc; PetscInt itr; PetscViewer fd; int fp; PetscBool flg; PetscInt it; PetscScalar myTime; PetscScalar zero = 0.0; PetscScalar DaysPerYear = 360.0; #if defined (FORSPINUP) || defined (FORJACOBIAN) ierr = PetscOptionsHasName(NULL,NULL,"-relax_tracer",&relaxTracer);CHKERRQ(ierr); if (relaxTracer) { maxValsToRead = numTracers; ierr = PetscOptionsGetRealArray(NULL,NULL,"-relax_tau",relaxTau,&maxValsToRead,&flg); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate tracer relaxation tau with the -relax_tau option"); if (maxValsToRead != numTracers) { SETERRQ(PETSC_COMM_WORLD,1,"Insufficient number of relaxation tau values specified"); } maxValsToRead = numTracers; ierr = PetscOptionsGetRealArray(NULL,NULL,"-relax_value",relaxValue,&maxValsToRead,&flg); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate relaxation values with the -relax_value option"); if (maxValsToRead != numTracers) { SETERRQ(PETSC_COMM_WORLD,1,"Insufficient number of relaxation values specified"); } for (itr=0; itr<numTracers; itr++) { if (relaxTau[itr]>0.0) { relaxLambda[itr]=1.0/relaxTau[itr]; } else { relaxLambda[itr]=0.0; relaxValue[itr]=0.0; } ierr = PetscPrintf(PETSC_COMM_WORLD,"Tracer %d relaxation lambda=%15.11f, relaxation value=%10.8f\n",itr,relaxLambda[itr],relaxValue[itr]);CHKERRQ(ierr); } } #endif for (itr=0; itr<numTracers; itr++) { ierr = 
VecSet(ut[itr],zero); CHKERRQ(ierr); } ierr = VecGetArrays(v,numTracers,&localTR);CHKERRQ(ierr); ierr = VecGetArrays(ut,numTracers,&localJTR);CHKERRQ(ierr); ierr = PetscOptionsHasName(NULL,NULL,"-separate_biogeochem_time_stepping",&useSeparateBiogeochemTimeStepping);CHKERRQ(ierr); #if defined (FORSPINUP) || defined (FORJACOBIAN) if (useSeparateBiogeochemTimeStepping) { SETERRQ(PETSC_COMM_WORLD,1,"Cannot use the -separate_biogeochem_time_stepping option with SPINUP or JACOBIAN "); } #endif if (useSeparateBiogeochemTimeStepping) { fromModel = 3; ierr = PetscPrintf(PETSC_COMM_WORLD,"Biogeochem model will be time-stepped independently\n");CHKERRQ(ierr); } ierr = PetscOptionsGetInt(NULL,NULL,"-num_biogeochem_steps_per_ocean_step",&numBiogeochemStepsPerOceanStep,&flg);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of biogeochem model time steps per ocean time step = %d\n",numBiogeochemStepsPerOceanStep);CHKERRQ(ierr); ierr = PetscOptionsGetInt(NULL,NULL,"-nzeuph",&nzeuph,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate number of euphotic zone layers with the -nzeuph option"); ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of euphotic zone layers is %d \n",nzeuph);CHKERRQ(ierr); ierr = PetscOptionsGetReal(NULL,NULL,"-biogeochem_deltat",&DeltaT,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate biogeochemical time step in seconds with the -biogeochem_deltat option"); ierr = PetscPrintf(PETSC_COMM_WORLD,"Ocean time step for BGC length is %12.7f seconds\n",DeltaT);CHKERRQ(ierr); TheoDeltaT = DaysPerYear*86400.0*deltaTClock; ierr = PetscPrintf(PETSC_COMM_WORLD,"Check: using a year length of %12.3f days \n",DaysPerYear);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Theoretical ocean time step length for BGC is then %12.7f seconds\n",TheoDeltaT);CHKERRQ(ierr); ierr = PetscOptionsHasName(NULL,NULL,"-periodic_biogeochem_forcing",&periodicBiogeochemForcing);CHKERRQ(ierr); if (periodicBiogeochemForcing) { 
ierr=PetscPrintf(PETSC_COMM_WORLD,"Periodic biogeochemical forcing specified\n");CHKERRQ(ierr); /* read time data */ /* IMPORTANT: time units must be the same as that used by the toplevel driver */ ierr = PetscOptionsGetReal(NULL,NULL,"-periodic_biogeochem_cycle_period",&biogeochemCyclePeriod,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate biogeochemical forcing cycling time with the -periodic_biogeochem_cycle_period option"); ierr = PetscOptionsGetReal(NULL,NULL,"-periodic_biogeochem_cycle_step",&biogeochemCycleStep,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate biogeochemical forcing cycling step with the -periodic_biogeochem_cycle_step option"); numBiogeochemPeriods=biogeochemCyclePeriod/biogeochemCycleStep; /* array for holding extended time array */ PetscMalloc((numBiogeochemPeriods+2)*sizeof(PetscScalar), &tdpBiogeochem); ierr = PetscPrintf(PETSC_COMM_WORLD,"Periodic biogeochemical forcing specified at times:\n");CHKERRQ(ierr); for (it=0; it<=numBiogeochemPeriods+1; it++) { tdpBiogeochem[it]=(-biogeochemCycleStep/2.0) + it*biogeochemCycleStep; ierr = PetscPrintf(PETSC_COMM_WORLD,"tdpBiogeochem=%10.5f\n", tdpBiogeochem[it]);CHKERRQ(ierr); } } /* Read T and S */ ierr = VecDuplicate(TR,&Ts);CHKERRQ(ierr); ierr = VecDuplicate(TR,&Ss);CHKERRQ(ierr); if (periodicBiogeochemForcing) { Tsp.firstTime = PETSC_TRUE; Ssp.firstTime = PETSC_TRUE; } else { ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"Ts.petsc",FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = VecLoad(Ts,fd);CHKERRQ(ierr); /* IntoVector */ ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"Ss.petsc",FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = VecLoad(Ss,fd);CHKERRQ(ierr); /* IntoVector */ ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); } ierr = VecGetArray(Ts,&localTs);CHKERRQ(ierr); ierr = VecGetArray(Ss,&localSs);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Done reading T/S\n");CHKERRQ(ierr); /* Need this for 
atmospheric exchange, river runoff, ... */ ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localdA);CHKERRQ(ierr); ierr = readProfileSurfaceScalarData("dA.bin",localdA,1); PetscScalar localA = 0.0; for (ip=0; ip<lNumProfiles; ip++) { localA = localA+localdA[ip]; } MPI_Allreduce(&localA, &totalA, 1, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD); /* global surface area */ #ifdef CARBON ierr = PetscOptionsHasName(NULL,NULL,"-use_atm_model",&useAtmModel);CHKERRQ(ierr); if (useAtmModel) { ierr = PetscPrintf(PETSC_COMM_WORLD,"Using interactive atmospheric model\n");CHKERRQ(ierr); /* overwrite default value */ ierr = PetscOptionsGetReal(NULL,NULL,"-pco2atm_ini",&pCO2atm_ini,&flg);CHKERRQ(ierr); /* read from command line */ if (!flg) { ierr = PetscOptionsGetString(NULL,NULL,"-pco2atm_ini_file",pCO2atmIniFile,PETSC_MAX_PATH_LEN-1,&flg);CHKERRQ(ierr); if (flg) { /* read from binary file */ ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,pCO2atmIniFile,FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr); ierr = PetscBinaryRead(fp,&pCO2atm_ini,1,NULL,PETSC_SCALAR);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); } } pCO2atm = pCO2atm_ini; ierr = PetscPrintf(PETSC_COMM_WORLD,"Using initial atmospheric pCO2 of %g ppm\n",pCO2atm);CHKERRQ(ierr); ierr = PetscOptionsGetInt(NULL,NULL,"-atm_write_steps",&atmWriteSteps,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate atmospheric model output step with the -atm_write_steps option"); ierr = PetscOptionsHasName(NULL,NULL,"-atm_append",&atmAppendOutput);CHKERRQ(ierr); if (atmAppendOutput) { ierr = PetscPrintf(PETSC_COMM_WORLD,"Atmospheric model output will be appended\n");CHKERRQ(ierr); } else { ierr = PetscPrintf(PETSC_COMM_WORLD,"Atmospheric model output will overwrite existing file(s)\n");CHKERRQ(ierr); } /* Output times */ ierr = PetscOptionsGetString(NULL,NULL,"-atm_time_file",atmOutTimeFile,PETSC_MAX_PATH_LEN-1,&flg);CHKERRQ(ierr); if (!flg) { 
strcpy(atmOutTimeFile,""); sprintf(atmOutTimeFile,"%s","atm_output_time.txt"); } ierr = PetscPrintf(PETSC_COMM_WORLD,"Atmospheric model output times will be written to %s\n",atmOutTimeFile);CHKERRQ(ierr); if (!atmAppendOutput) { ierr = PetscFOpen(PETSC_COMM_WORLD,atmOutTimeFile,"w",&atmfptime);CHKERRQ(ierr); ierr = PetscFPrintf(PETSC_COMM_WORLD,atmfptime,"%d %10.5f\n",Iter0,time0);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Writing atmospheric output at time %10.5f, step %d\n", tc,Iter);CHKERRQ(ierr); ierr = writeBinaryScalarData("pCO2atm_output.bin",&pCO2atm,1,PETSC_FALSE); ierr = writeBinaryScalarData("Foceanint_output.bin",&Focean,1,PETSC_FALSE); } else { ierr = PetscFOpen(PETSC_COMM_WORLD,atmOutTimeFile,"a",&atmfptime);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Atmospheric model output will be appended. Initial condition will NOT be written\n");CHKERRQ(ierr); } ierr = PetscOptionsGetInt(NULL,NULL,"-atm_update_steps",&atmModelUpdateTimeSteps,&flg);CHKERRQ(ierr); if ((maxSteps % atmModelUpdateTimeSteps)!=0) { SETERRQ(PETSC_COMM_WORLD,1,"maxSteps not divisible by atmModelUpdateTimeSteps!"); } if ((atmWriteSteps % atmModelUpdateTimeSteps)!=0) { SETERRQ(PETSC_COMM_WORLD,1,"atmWriteSteps not divisible by atmModelUpdateTimeSteps!"); } ierr = PetscPrintf(PETSC_COMM_WORLD,"Atmospheric model will be updated every %d time steps\n",atmModelUpdateTimeSteps);CHKERRQ(ierr); if (atmModelUpdateTimeSteps>1) { ierr = PetscPrintf(PETSC_COMM_WORLD,"WARNING: Focean and pCO2atm diagnostics may not be correct!\n");CHKERRQ(ierr); } atmModelDeltaT = atmModelUpdateTimeSteps*DeltaT/secPerYear; /* time step in years */ } else { /* not using atm model */ ierr = PetscPrintf(PETSC_COMM_WORLD,"Using prescribed atmospheric pCO2\n");CHKERRQ(ierr); /* prescribed atmospheric CO2 */ maxValsToRead = 2; pCO2atmFiles[0] = (char *) malloc(PETSC_MAX_PATH_LEN*sizeof(char)); /* time file */ pCO2atmFiles[1] = (char *) malloc(PETSC_MAX_PATH_LEN*sizeof(char)); /* atmospheric pCO2 history 
file */ ierr = PetscOptionsGetStringArray(NULL,NULL,"-pco2atm_history",pCO2atmFiles,&maxValsToRead,&flg);CHKERRQ(ierr); if (flg) { /* Read atmospheric pCO2 history */ if (maxValsToRead != 2) { SETERRQ(PETSC_COMM_WORLD,1,"Insufficient number of file names specified for atmospheric pCO2 history"); } fixedAtmosCO2 = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_WORLD,"Reading time-dependent atmospheric pCO2 history\n");CHKERRQ(ierr); /* read time data */ ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,pCO2atmFiles[0],FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr); ierr = PetscBinaryRead(fp,&numpCO2atm_hist,1,NULL,PETSC_INT);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of points in atmospheric history file is %d \n",numpCO2atm_hist);CHKERRQ(ierr); ierr = PetscMalloc(numpCO2atm_hist*sizeof(PetscScalar),&TpCO2atm_hist);CHKERRQ(ierr); ierr = PetscBinaryRead(fp,TpCO2atm_hist,numpCO2atm_hist,NULL,PETSC_SCALAR);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); /* read atmospheric pCO2 data */ ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,pCO2atmFiles[1],FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr); ierr = PetscMalloc(numpCO2atm_hist*sizeof(PetscScalar),&pCO2atm_hist);CHKERRQ(ierr); ierr = PetscBinaryRead(fp,pCO2atm_hist,numpCO2atm_hist,NULL,PETSC_SCALAR);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); pCO2atm = pCO2atm_hist[0]; } else { ierr = PetscOptionsGetReal(NULL,NULL,"-pco2atm",&pCO2atm,&flg);CHKERRQ(ierr); /* overwrite default value */ ierr = PetscPrintf(PETSC_COMM_WORLD,"Using fixed atmospheric pCO2 of %g ppm\n",pCO2atm);CHKERRQ(ierr); } } ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localph);CHKERRQ(ierr); ierr = PetscOptionsHasName(NULL,NULL,"-use_virtual_flux",&useVirtualFlux);CHKERRQ(ierr); ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localEmP);CHKERRQ(ierr); if (periodicBiogeochemForcing) { localEmPp.firstTime = 
PETSC_TRUE; localEmPp.arrayLength = lNumProfiles; } else { ierr = readProfileSurfaceScalarData("EmP.bin",localEmP,1); } if (useVirtualFlux) { ierr = VecDuplicate(TR,&surfVolFrac);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"surface_volume_fraction.petsc",FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = VecLoad(surfVolFrac,fd);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); } #endif ierr = PetscPrintf(PETSC_COMM_WORLD,"Using Burial-Runoff model\n");CHKERRQ(ierr); /* Define the interval over which to integrate global burial */ ierr = PetscOptionsGetInt(NULL,NULL,"-burial_sum_steps",&burialSumSteps,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate burial integration interval with the -burial_sum_steps option"); if ((maxSteps % burialSumSteps)!=0) { SETERRQ(PETSC_COMM_WORLD,1,"maxSteps not divisible by burialSumSteps!"); } ierr = PetscPrintf(PETSC_COMM_WORLD,"Runoff be integrated over and written every %d time steps\n",burialSumSteps);CHKERRQ(ierr); /* set the name of the runoff time file */ ierr = PetscOptionsGetString(NULL,NULL,"-runoff_time_file",runoffOutTimeFile,PETSC_MAX_PATH_LEN-1,&flg);CHKERRQ(ierr); if (!flg) { strcpy(runoffOutTimeFile,""); sprintf(runoffOutTimeFile,"%s","runoff_output_time.txt"); } ierr = PetscPrintf(PETSC_COMM_WORLD,"Runoff output times will be written to %s\n",runoffOutTimeFile);CHKERRQ(ierr); /* set inititial runoff: overwrite default value with value from command line*/ ierr = PetscOptionsGetReal(NULL,NULL,"-runoff_ini",&runoff_ini,&flg);CHKERRQ(ierr); /* set inititial runoff: overwrite default value with value from file*/ if (!flg) { ierr = PetscOptionsGetString(NULL,NULL,"-runoff_ini_file",runoffIniFile,PETSC_MAX_PATH_LEN-1,&flg);CHKERRQ(ierr); if (flg) { /* read from binary file */ ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,runoffIniFile,FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr); ierr = 
PetscBinaryRead(fp,&runoff_ini,1,NULL,PETSC_SCALAR);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); } } GRunoff = runoff_ini; ierr = PetscPrintf(PETSC_COMM_WORLD,"Using initial runoff of %g Gmol P/d\n",GRunoff);CHKERRQ(ierr); /* if run is continued, always append runoff and output times */ if (Iter0>0) { ierr = PetscPrintf(PETSC_COMM_WORLD,"Runoff output will be appended\n");CHKERRQ(ierr); ierr = PetscFOpen(PETSC_COMM_WORLD,runoffOutTimeFile,"a",&runofffptime);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Initial runoff output will not be written\n");CHKERRQ(ierr); } else { ierr = PetscPrintf(PETSC_COMM_WORLD,"Runoff output will overwrite existing file(s)\n");CHKERRQ(ierr); ierr = PetscFOpen(PETSC_COMM_WORLD,runoffOutTimeFile,"w",&runofffptime);CHKERRQ(ierr); ierr = PetscFPrintf(PETSC_COMM_WORLD,runofffptime,"%d %10.5f\n",Iter0,time0);CHKERRQ(ierr); ierr = writeBinaryScalarData("Grunoff_output.bin",&GRunoff,1,PETSC_FALSE); ierr = PetscPrintf(PETSC_COMM_WORLD,"Writing runoff output at time %10.5f, step %d\n", tc,Iter);CHKERRQ(ierr); } /* fraction of global river runoff in each box, divided by the box volume (a 3D field) */ /* Note: VecLoadVecIntoArray resides in petsc_matvec_utils.c and is not a generic petsc function*/ ierr = PetscMalloc(lSize*sizeof(PetscScalar),&localrunoffvol);CHKERRQ(ierr); #ifdef RUNOFF ierr = PetscPrintf(PETSC_COMM_WORLD,"Runoff will be supplied via rivers %g\n");CHKERRQ(ierr); ierr = VecLoadVecIntoArray(TR,"runoff_volume_annual.petsc",localrunoffvol);CHKERRQ(ierr); #else ierr = PetscPrintf(PETSC_COMM_WORLD,"Runoff will be distributed over total ocean area of %g\n",totalA);CHKERRQ(ierr); ierr = VecLoadVecIntoArray(TR,"dz.petsc",localrunoffvol);CHKERRQ(ierr); /* IK: loading dz.petsc is just a dummy for now; runoff will be divided by dz(1) in BGC_MODEL.F */ #endif /* Grid arrays */ ierr = PetscMalloc(lSize*sizeof(PetscScalar),&localdz);CHKERRQ(ierr); ierr = VecLoadVecIntoArray(TR,"dz.petsc",localdz);CHKERRQ(ierr); ierr = 
PetscViewerBinaryOpen(PETSC_COMM_SELF,"drF.bin",FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr); ierr = PetscBinaryRead(fp,&nzmax,1,NULL,PETSC_INT);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of vertical layers is %d \n",nzmax);CHKERRQ(ierr); ierr = PetscMalloc(nzmax*sizeof(PetscScalar),&drF);CHKERRQ(ierr); ierr = PetscBinaryRead(fp,drF,nzmax,NULL,PETSC_SCALAR);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); /* Forcing fields */ ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localswrad);CHKERRQ(ierr); ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localtau);CHKERRQ(ierr); #ifdef READ_SWRAD if (periodicBiogeochemForcing) { localswradp.firstTime = PETSC_TRUE; localswradp.arrayLength = lNumProfiles; } else { ierr = readProfileSurfaceScalarData("swrad.bin",localswrad,1); } #else ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&locallatitude);CHKERRQ(ierr); ierr = readProfileSurfaceScalarData("latitude.bin",locallatitude,1); #endif ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localfice);CHKERRQ(ierr); if (periodicBiogeochemForcing) { localficep.firstTime = PETSC_TRUE; localficep.arrayLength = lNumProfiles; } else { ierr = readProfileSurfaceScalarData("fice.bin",localfice,1); } ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localwind);CHKERRQ(ierr); if (periodicBiogeochemForcing) { localwindp.firstTime = PETSC_TRUE; localwindp.arrayLength = lNumProfiles; } else { ierr = readProfileSurfaceScalarData("wind.bin",localwind,1); } ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localatmosp);CHKERRQ(ierr); if (periodicBiogeochemForcing) { localatmospp.firstTime = PETSC_TRUE; localatmospp.arrayLength = lNumProfiles; } else { ierr = readProfileSurfaceScalarData("atmosp.bin",localatmosp,1); } /* Initialize biogeochem model */ myTime = DeltaT*Iter; /* Iter should start at 0 */ if (periodicBiogeochemForcing) { ierr = 
interpPeriodicVector(tc,&Ts,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&Tsp,"Ts_"); ierr = interpPeriodicVector(tc,&Ss,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&Ssp,"Ss_"); #ifdef READ_SWRAD ierr = interpPeriodicProfileSurfaceScalarData(tc,localswrad,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localswradp,"swrad_"); #else insolation_(&lNumProfiles,&myTime,&locallatitude[0],&localswrad[0],&localtau[0]); #endif ierr = interpPeriodicProfileSurfaceScalarData(tc,localfice,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localficep,"fice_"); ierr = interpPeriodicProfileSurfaceScalarData(tc,localwind,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localwindp,"wind_"); ierr = interpPeriodicProfileSurfaceScalarData(tc,localatmosp,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localatmospp,"atmosp_"); #ifdef CARBON ierr = interpPeriodicProfileSurfaceScalarData(tc,localEmP,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localEmPp,"EmP_"); #endif } else { #ifndef READ_SWRAD insolation_(&lNumProfiles,&myTime,&locallatitude[0],&localswrad[0],&localtau[0]); #endif } for (ip=0; ip<lNumProfiles; ip++) { nzloc=lProfileLength[ip]; kl=lStartIndices[ip]; for (itr=0; itr<numTracers; itr++) { mops_biogeochem_copy_data_(&nzloc,&itr,&localTR[itr][kl],&localJTR[itr][kl],&DeltaT,&toModel); } mops_biogeochem_ini_(&nzloc,&DeltaT, #ifdef CARBON &localph[ip], #endif &localTs[kl],&localSs[kl],&localdz[kl],&drF[0],&nzmax,&nzeuph, &numBiogeochemStepsPerOceanStep,&MYTRUE); } /* Read and overwrite default parameter values here */ ierr = PetscOptionsGetString(NULL,NULL,"-bgc_params_file",bgcParamsFile,PETSC_MAX_PATH_LEN-1,&readBGCParams);CHKERRQ(ierr); if (readBGCParams) { ierr = PetscOptionsGetInt(NULL,NULL,"-num_bgc_params",&numBGCParams,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate number of BGC parameters to read with the -num_bgc_params option"); ierr = 
PetscViewerBinaryOpen(PETSC_COMM_SELF,bgcParamsFile,FILE_MODE_READ,&fd);CHKERRQ(ierr); ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr); ierr = PetscMalloc(numBGCParams*sizeof(PetscScalar),&bgcparams);CHKERRQ(ierr); ierr = PetscBinaryRead(fp,bgcparams,numBGCParams,NULL,PETSC_SCALAR);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr); mops_biogeochem_set_params_(&numBGCParams,&bgcparams[0]); myTime = DeltaT*Iter; /* Iter should start at 0 */ for (ip=0; ip<lNumProfiles; ip++) { nzloc=lProfileLength[ip]; kl=lStartIndices[ip]; for (itr=0; itr<numTracers; itr++) { mops_biogeochem_copy_data_(&nzloc,&itr,&localTR[itr][kl],&localJTR[itr][kl],&DeltaT,&toModel); } mops_biogeochem_ini_(&nzloc,&DeltaT, #ifdef CARBON &localph[ip], #endif &localTs[kl],&localSs[kl],&localdz[kl],&drF[0],&nzmax,&nzeuph, &numBiogeochemStepsPerOceanStep,&MYFALSE); } } ierr = PetscOptionsHasName(NULL,NULL,"-calc_diagnostics",&calcDiagnostics);CHKERRQ(ierr); if (calcDiagnostics) { /*Data for diagnostics */ ierr = PetscOptionsGetInt(NULL,NULL,"-diag_start_time_step",&diagStartTimeStep,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate (absolute) time step at which to start storing diagnostics with the -diag_start_time_step flag"); ierr = PetscOptionsGetInt(NULL,NULL,"-diag_time_steps",&diagNumTimeSteps,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate number of time averaging diagnostics time steps with the -diag_time_step flag"); ierr = PetscPrintf(PETSC_COMM_WORLD,"Diagnostics will be computed starting at (and including) time step: %d\n", diagStartTimeStep);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD,"Diagnostics will be computed over %d time steps\n", diagNumTimeSteps);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc1);CHKERRQ(ierr); ierr = VecSet(fbgc1,zero);CHKERRQ(ierr); ierr = VecGetArray(fbgc1,&localfbgc1);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc1avg);CHKERRQ(ierr); ierr = VecSet(fbgc1avg,zero);CHKERRQ(ierr); ierr = 
PetscViewerBinaryOpen(PETSC_COMM_WORLD,"fbgc1.petsc",FILE_MODE_WRITE,&fdfbgc1avg);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc2);CHKERRQ(ierr); ierr = VecSet(fbgc2,zero);CHKERRQ(ierr); ierr = VecGetArray(fbgc2,&localfbgc2);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc2avg);CHKERRQ(ierr); ierr = VecSet(fbgc2avg,zero);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"fbgc2.petsc",FILE_MODE_WRITE,&fdfbgc2avg);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc3);CHKERRQ(ierr); ierr = VecSet(fbgc3,zero);CHKERRQ(ierr); ierr = VecGetArray(fbgc3,&localfbgc3);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc3avg);CHKERRQ(ierr); ierr = VecSet(fbgc3avg,zero);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"fbgc3.petsc",FILE_MODE_WRITE,&fdfbgc3avg);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc4);CHKERRQ(ierr); ierr = VecSet(fbgc4,zero);CHKERRQ(ierr); ierr = VecGetArray(fbgc4,&localfbgc4);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc4avg);CHKERRQ(ierr); ierr = VecSet(fbgc4avg,zero);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"fbgc4.petsc",FILE_MODE_WRITE,&fdfbgc4avg);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc5);CHKERRQ(ierr); ierr = VecSet(fbgc5,zero);CHKERRQ(ierr); ierr = VecGetArray(fbgc5,&localfbgc5);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc5avg);CHKERRQ(ierr); ierr = VecSet(fbgc5avg,zero);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"fbgc5.petsc",FILE_MODE_WRITE,&fdfbgc5avg);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc6);CHKERRQ(ierr); ierr = VecSet(fbgc6,zero);CHKERRQ(ierr); ierr = VecGetArray(fbgc6,&localfbgc6);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc6avg);CHKERRQ(ierr); ierr = VecSet(fbgc6avg,zero);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"fbgc6.petsc",FILE_MODE_WRITE,&fdfbgc6avg);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc7);CHKERRQ(ierr); ierr = VecSet(fbgc7,zero);CHKERRQ(ierr); ierr = VecGetArray(fbgc7,&localfbgc7);CHKERRQ(ierr); ierr = VecDuplicate(TR,&fbgc7avg);CHKERRQ(ierr); ierr = 
VecSet(fbgc7avg,zero);CHKERRQ(ierr); ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"fbgc7.petsc",FILE_MODE_WRITE,&fdfbgc7avg);CHKERRQ(ierr); #ifdef CARBON ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localco2airseafluxdiag);CHKERRQ(ierr); ierr = PetscMalloc(lNumProfiles*sizeof(PetscScalar),&localco2airseafluxdiagavg);CHKERRQ(ierr); for (ip=0; ip<lNumProfiles; ip++) { localco2airseafluxdiag[ip]=0.0; localco2airseafluxdiagavg[ip]=0.0; } #endif diagCount=0; } return 0; } /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ #undef __FUNCT__ #define __FUNCT__ "calcExternalForcing" PetscErrorCode calcExternalForcing(PetscScalar tc, PetscInt Iter, PetscInt iLoop, PetscInt numTracers, Vec *v, Vec *ut) { PetscErrorCode ierr; PetscInt itr, ip, nzloc, kl; PetscScalar myTime; #ifdef CARBON PetscInt itf; PetscScalar alpha; PetscScalar DICemp = 0.0, ALKemp = 0.0; PetscScalar localco2airseaflux = 0.0; #endif PetscScalar localburial = 0.0; myTime = DeltaT*Iter; /* Iter should start at 0 */ if (periodicBiogeochemForcing) { ierr = interpPeriodicVector(tc,&Ts,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&Tsp,"Ts_"); ierr = interpPeriodicVector(tc,&Ss,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&Ssp,"Ss_"); #ifdef READ_SWRAD ierr = interpPeriodicProfileSurfaceScalarData(tc,localswrad,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localswradp,"swrad_"); #else insolation_(&lNumProfiles,&myTime,&locallatitude[0],&localswrad[0],&localtau[0]); #endif ierr = interpPeriodicProfileSurfaceScalarData(tc,localfice,biogeochemCyclePeriod,numBiogeochemPeriods, 
tdpBiogeochem,&localficep,"fice_"); ierr = interpPeriodicProfileSurfaceScalarData(tc,localwind,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localwindp,"wind_"); ierr = interpPeriodicProfileSurfaceScalarData(tc,localatmosp,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localatmospp,"atmosp_"); #ifdef CARBON ierr = interpPeriodicProfileSurfaceScalarData(tc,localEmP,biogeochemCyclePeriod,numBiogeochemPeriods, tdpBiogeochem,&localEmPp,"EmP_"); #endif } #ifdef CARBON if (useAtmModel) { } else { /* Interpolate atmospheric pCO2 */ if (!fixedAtmosCO2) { if (tc>=TpCO2atm_hist[0]) { ierr = calcInterpFactor(numpCO2atm_hist,tc,TpCO2atm_hist,&itf,&alpha); CHKERRQ(ierr); pCO2atm = alpha*pCO2atm_hist[itf] + (1.0-alpha)*pCO2atm_hist[itf+1]; } else { ierr = PetscPrintf(PETSC_COMM_WORLD,"Warning: time < %10.5f. Assuming pCO2atm=%g\n",TpCO2atm_hist[0],pCO2atm);CHKERRQ(ierr); } } } if (useVirtualFlux) { /* use the global surface mean value to calculate E-P contribution */ ierr = VecDot(surfVolFrac,DIC,&DICemp);CHKERRQ(ierr); /* volume weighted mean surface DIC */ ierr = VecDot(surfVolFrac,ALK,&ALKemp);CHKERRQ(ierr); /* volume weighted mean surface ALK */ } #endif for (ip=0; ip<lNumProfiles; ip++) { nzloc=lProfileLength[ip]; kl=lStartIndices[ip]; #ifdef CARBON if (!useVirtualFlux) { /* use the local surface value to calculate E-P contribution */ DICemp=localDIC[kl]; ALKemp=localALK[kl]; } #endif for (itr=0; itr<numTracers; itr++) { mops_biogeochem_copy_data_(&nzloc,&itr,&localTR[itr][kl],&localJTR[itr][kl],&DeltaT,&toModel); } mops_biogeochem_model_(&nzloc,&DeltaT, #ifdef CARBON &DICemp,&ALKemp,&localEmP[ip],&pCO2atm, #endif &localTs[kl],&localSs[kl],&localfice[ip],&localswrad[ip],&localtau[ip],&localwind[ip],&localatmosp[ip],&localdz[kl], #ifdef CARBON &localph[ip],&localco2airseaflux, #endif &localburial,&GRunoff,&localrunoffvol[kl], &useSeparateBiogeochemTimeStepping); for (itr=0; itr<numTracers; itr++) { 
mops_biogeochem_copy_data_(&nzloc,&itr,&localTR[itr][kl],&localJTR[itr][kl],&DeltaT,&fromModel); } #ifdef CARBON if (useAtmModel) { localFocean = localFocean + (localco2airseaflux/DeltaT)*localdA[ip]*(12.0/1.e18)*secPerYear; /* PgC/y */ } #endif /* integrate burial in sediment over area and all profiles on each processor */ localFburial = localFburial + localburial*localdA[ip]; if (calcDiagnostics) { if (Iter0+iLoop>=diagStartTimeStep) { /* start time averaging (note: diagStartTimeStep is ABSOLUTE time step) */ mops_biogeochem_diagnostics_(&nzloc,&localfbgc1[kl],&localfbgc2[kl],&localfbgc3[kl],&localfbgc4[kl],&localfbgc5[kl],&localfbgc6[kl],&localfbgc7[kl]); #ifdef CARBON localco2airseafluxdiag[ip]=localco2airseaflux; #endif } } } /* end loop over profiles */ #ifdef CARBON if (useAtmModel) { if ((iLoop % atmModelUpdateTimeSteps)==0) { /* time to update atmosphere */ Focean = 0.0; MPI_Allreduce(&localFocean, &Focean, 1, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD); /* time step atmosphere */ Focean = Focean/atmModelUpdateTimeSteps; /* average flux over accumulation period Pg C/yr*/ pCO2atm = pCO2atm + atmModelDeltaT*(-Focean)/ppmToPgC; /* reset values */ localFocean = 0.0; Foceanint = Foceanint + atmModelDeltaT*Focean; /* calculate the time integrated flux */ /* Focean = 0.0; */ } } #endif /* sum burial in sediment over all processors, and scale by time step etc.*/ /* do this only once every burialSumSteps , and then take this value for next year's runoff */ if ((iLoop % burialSumSteps)==0) { Fburial = 0.0; MPI_Allreduce(&localFburial, &Fburial, 1, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD); #ifdef RUNOFF GRunoff = Fburial/(1.e12*burialSumSteps)*(86400.0/DeltaT); /* This is Gmol P/day. Note: localrunoff is scaled with 1e12. Note: GRunoff will be scaled with bgc_dt.*/ #else GRunoff = Fburial/(totalA*burialSumSteps)*(86400.0/DeltaT); /* This is mmol P/m2/day. Note: this will later be divided by depth of first layer. 
Note: GRunoff will be scaled with bgc_dt.*/ #endif localFburial = 0.0; } if (useSeparateBiogeochemTimeStepping) { /* return updated tracer field */ for (itr=0; itr<numTracers; itr++) { ierr = VecSetValues(v[itr],lSize,gIndices,localTR[itr],INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(v[itr]);CHKERRQ(ierr); ierr = VecAssemblyEnd(v[itr]);CHKERRQ(ierr); } } else { for (itr=0; itr<numTracers; itr++) { ierr = VecSetValues(ut[itr],lSize,gIndices,localJTR[itr],INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(ut[itr]);CHKERRQ(ierr); ierr = VecAssemblyEnd(ut[itr]);CHKERRQ(ierr); } #if defined (FORSPINUP) || defined (FORJACOBIAN) /* add relaxation term: ut = ut - lambda*(v-vr) = ut -lambda*v + lambda*vr */ if (relaxTracer) { for (itr=0; itr<numTracers; itr++) { ierr = VecAXPY(ut[itr],-relaxLambda[itr],v[itr]);CHKERRQ(ierr); /* ut = ut - lambda*v */ ierr = VecShift(ut[itr],relaxLambda[itr]*relaxValue[itr]);CHKERRQ(ierr); /* ut = ut + lambda*vr */ } } #endif /* Convert to discrete tendency */ for (itr=0; itr<numTracers; itr++) { ierr = VecScale(ut[itr],DeltaT);CHKERRQ(ierr); } } if (calcDiagnostics) { if (Iter0+iLoop>=diagStartTimeStep) { /* start time averaging (note: diagStartTimeStep is ABSOLUTE time step) */ ierr = VecSetValues(fbgc1,lSize,gIndices,localfbgc1,INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(fbgc1);CHKERRQ(ierr); ierr = VecAssemblyEnd(fbgc1);CHKERRQ(ierr); ierr = VecSetValues(fbgc2,lSize,gIndices,localfbgc2,INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(fbgc2);CHKERRQ(ierr); ierr = VecAssemblyEnd(fbgc2);CHKERRQ(ierr); ierr = VecSetValues(fbgc3,lSize,gIndices,localfbgc3,INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(fbgc3);CHKERRQ(ierr); ierr = VecAssemblyEnd(fbgc3);CHKERRQ(ierr); ierr = VecSetValues(fbgc4,lSize,gIndices,localfbgc4,INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(fbgc4);CHKERRQ(ierr); ierr = VecAssemblyEnd(fbgc4);CHKERRQ(ierr); ierr = 
VecSetValues(fbgc5,lSize,gIndices,localfbgc5,INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(fbgc5);CHKERRQ(ierr); ierr = VecAssemblyEnd(fbgc5);CHKERRQ(ierr); ierr = VecSetValues(fbgc6,lSize,gIndices,localfbgc6,INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(fbgc6);CHKERRQ(ierr); ierr = VecAssemblyEnd(fbgc6);CHKERRQ(ierr); ierr = VecSetValues(fbgc7,lSize,gIndices,localfbgc7,INSERT_VALUES);CHKERRQ(ierr); ierr = VecAssemblyBegin(fbgc7);CHKERRQ(ierr); ierr = VecAssemblyEnd(fbgc7);CHKERRQ(ierr); } } return 0; } /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ #undef __FUNCT__ #define __FUNCT__ "writeExternalForcing" PetscErrorCode writeExternalForcing(PetscScalar tc, PetscInt iLoop, PetscInt numTracers, Vec *v, Vec *ut) { PetscErrorCode ierr; PetscInt ip; PetscScalar zero = 0.0, one = 1.0; /* Note: tc and iLoop are the time and step at the end of the current time step. 
*/ #ifdef CARBON if (useAtmModel) { /* write instantaneous atmos model state */ if ((iLoop % atmWriteSteps)==0) { /* time to write out */ ierr = PetscPrintf(PETSC_COMM_WORLD,"Writing atmospheric model output at time %10.5f, step %d\n", tc, Iter0+iLoop);CHKERRQ(ierr); ierr = PetscFPrintf(PETSC_COMM_WORLD,atmfptime,"%d %10.5f\n",Iter0+iLoop,tc);CHKERRQ(ierr); ierr = writeBinaryScalarData("pCO2atm_output.bin",&pCO2atm,1,PETSC_TRUE); ierr = writeBinaryScalarData("Foceanint_output.bin",&Foceanint,1,PETSC_TRUE); Foceanint = 0.0; } } #endif if ((iLoop % burialSumSteps)==0) { /* time to write out */ ierr = PetscPrintf(PETSC_COMM_WORLD,"Writing runoff output at time %10.5f, step %d\n", tc, Iter0+iLoop);CHKERRQ(ierr); ierr = PetscFPrintf(PETSC_COMM_WORLD,runofffptime,"%d %10.5f\n",Iter0+iLoop,tc);CHKERRQ(ierr); ierr = writeBinaryScalarData("Grunoff_output.bin",&GRunoff,1,PETSC_TRUE); } if (calcDiagnostics) { if (Iter0+iLoop>=diagStartTimeStep) { /* start time averaging (note: diagStartTimeStep is ABSOLUTE time step) */ if (diagCount<=diagNumTimeSteps) { /* still within same averaging block so accumulate */ ierr = VecAXPY(fbgc1avg,one,fbgc1);CHKERRQ(ierr); ierr = VecAXPY(fbgc2avg,one,fbgc2);CHKERRQ(ierr); ierr = VecAXPY(fbgc3avg,one,fbgc3);CHKERRQ(ierr); ierr = VecAXPY(fbgc4avg,one,fbgc4);CHKERRQ(ierr); ierr = VecAXPY(fbgc5avg,one,fbgc5);CHKERRQ(ierr); ierr = VecAXPY(fbgc6avg,one,fbgc6);CHKERRQ(ierr); ierr = VecAXPY(fbgc7avg,one,fbgc7);CHKERRQ(ierr); #ifdef CARBON for (ip=0; ip<lNumProfiles; ip++) { localco2airseafluxdiagavg[ip]=localco2airseafluxdiag[ip]+localco2airseafluxdiagavg[ip]; } #endif diagCount = diagCount+1; } if (diagCount==diagNumTimeSteps) { /* time to write averages to file */ ierr = PetscPrintf(PETSC_COMM_WORLD,"Writing diagnostics time average at time %10.5f, step %d\n", tc, Iter0+iLoop);CHKERRQ(ierr); ierr = VecScale(fbgc1avg,1.0/diagCount);CHKERRQ(ierr); ierr = VecView(fbgc1avg,fdfbgc1avg);CHKERRQ(ierr); ierr = VecSet(fbgc1avg,zero); CHKERRQ(ierr); ierr = 
VecScale(fbgc2avg,1.0/diagCount);CHKERRQ(ierr); ierr = VecView(fbgc2avg,fdfbgc2avg);CHKERRQ(ierr); ierr = VecSet(fbgc2avg,zero); CHKERRQ(ierr); ierr = VecScale(fbgc3avg,1.0/diagCount);CHKERRQ(ierr); ierr = VecView(fbgc3avg,fdfbgc3avg);CHKERRQ(ierr); ierr = VecSet(fbgc3avg,zero); CHKERRQ(ierr); ierr = VecScale(fbgc4avg,1.0/diagCount);CHKERRQ(ierr); ierr = VecView(fbgc4avg,fdfbgc4avg);CHKERRQ(ierr); ierr = VecSet(fbgc4avg,zero); CHKERRQ(ierr); ierr = VecScale(fbgc5avg,1.0/diagCount);CHKERRQ(ierr); ierr = VecView(fbgc5avg,fdfbgc5avg);CHKERRQ(ierr); ierr = VecSet(fbgc5avg,zero); CHKERRQ(ierr); ierr = VecScale(fbgc6avg,1.0/diagCount);CHKERRQ(ierr); ierr = VecView(fbgc6avg,fdfbgc6avg);CHKERRQ(ierr); ierr = VecSet(fbgc6avg,zero); CHKERRQ(ierr); ierr = VecScale(fbgc7avg,1.0/diagCount);CHKERRQ(ierr); ierr = VecView(fbgc7avg,fdfbgc7avg);CHKERRQ(ierr); ierr = VecSet(fbgc7avg,zero); CHKERRQ(ierr); #ifdef CARBON for (ip=0; ip<lNumProfiles; ip++) { localco2airseafluxdiagavg[ip]=localco2airseafluxdiagavg[ip]/diagCount; } ierr = writeProfileSurfaceScalarData("co2airseaflux_surf.bin",localco2airseafluxdiagavg,1,appendDiagnostics); /* reset diagnostic arrays */ for (ip=0; ip<lNumProfiles; ip++) { localco2airseafluxdiagavg[ip]=0.0; } #endif appendDiagnostics=PETSC_TRUE; diagCount = 0; } } } return 0; } /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ /* -----------------------------------------------------------------------------------------------------------*/ #undef __FUNCT__ #define __FUNCT__ "finalizeExternalForcing" PetscErrorCode finalizeExternalForcing(PetscScalar tc, PetscInt Iter, PetscInt numTracers) { PetscErrorCode ierr; /* write final pickup */ #ifdef CARBON if (useAtmModel) { /* write 
instantaneous atmos model state */ ierr = writeBinaryScalarData("pickup_pCO2atm.bin",&pCO2atm,1,PETSC_FALSE); } #endif ierr = writeBinaryScalarData("pickup_runoff.bin",&GRunoff,1,PETSC_FALSE); ierr = VecDestroy(&Ts);CHKERRQ(ierr); ierr = VecDestroy(&Ss);CHKERRQ(ierr); if (periodicBiogeochemForcing) { ierr = destroyPeriodicVec(&Tsp);CHKERRQ(ierr); ierr = destroyPeriodicVec(&Ssp);CHKERRQ(ierr); ierr = destroyPeriodicArray(&localficep);CHKERRQ(ierr); ierr = destroyPeriodicArray(&localwindp);CHKERRQ(ierr); ierr = destroyPeriodicArray(&localatmospp);CHKERRQ(ierr); #ifdef READ_SWRAD ierr = destroyPeriodicArray(&localswradp);CHKERRQ(ierr); #endif #ifdef CARBON ierr = destroyPeriodicArray(&localEmPp);CHKERRQ(ierr); #endif } #ifdef CARBON if (useVirtualFlux) { ierr = VecDestroy(&surfVolFrac);CHKERRQ(ierr); } #endif if (calcDiagnostics) { ierr = VecDestroy(&fbgc1);CHKERRQ(ierr); ierr = VecDestroy(&fbgc1avg);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fdfbgc1avg);CHKERRQ(ierr); ierr = VecDestroy(&fbgc2);CHKERRQ(ierr); ierr = VecDestroy(&fbgc2avg);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fdfbgc2avg);CHKERRQ(ierr); ierr = VecDestroy(&fbgc3);CHKERRQ(ierr); ierr = VecDestroy(&fbgc3avg);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fdfbgc3avg);CHKERRQ(ierr); ierr = VecDestroy(&fbgc4);CHKERRQ(ierr); ierr = VecDestroy(&fbgc4avg);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fdfbgc4avg);CHKERRQ(ierr); ierr = VecDestroy(&fbgc5);CHKERRQ(ierr); ierr = VecDestroy(&fbgc5avg);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fdfbgc5avg);CHKERRQ(ierr); ierr = VecDestroy(&fbgc6);CHKERRQ(ierr); ierr = VecDestroy(&fbgc6avg);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fdfbgc6avg);CHKERRQ(ierr); ierr = VecDestroy(&fbgc7);CHKERRQ(ierr); ierr = VecDestroy(&fbgc7avg);CHKERRQ(ierr); ierr = PetscViewerDestroy(&fdfbgc7avg);CHKERRQ(ierr); } #ifdef CARBON if (useAtmModel) { ierr = PetscFClose(PETSC_COMM_WORLD,atmfptime);CHKERRQ(ierr); } #endif ierr = PetscFClose(PETSC_COMM_WORLD,runofffptime);CHKERRQ(ierr); return 0; } /* 
-----------------------------------------------------------------------------------------------------------*/
/* -----------------------------------------------------------------------------------------------------------*/
/* -----------------------------------------------------------------------------------------------------------*/
/* -----------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "reInitializeExternalForcing"
/* Re-initialize the external (biogeochemical) forcing for a restarted/continued run:
   re-interpolate the periodic physical forcing fields to time tc and, if requested,
   re-read the BGC parameter file and re-initialize the biogeochemistry in every
   water-column profile.
   Parameters:
     tc         - current model time (used to interpolate periodic forcing)
     Iter       - current (absolute) iteration number; Iter should start at 0
     numTracers - number of tracers (unused here, kept for interface uniformity)
     v, ut      - tracer and tendency vectors (unused here, kept for interface uniformity)
   Returns 0 on success; PETSc errors propagate via CHKERRQ. */
PetscErrorCode reInitializeExternalForcing(PetscScalar tc, PetscInt Iter, PetscInt numTracers, Vec *v, Vec *ut)
{
  PetscErrorCode ierr;
  PetscInt ip, kl, nzloc;
  PetscScalar myTime;
  PetscViewer fd;
  int fp;

  myTime = DeltaT*Iter; /* Iter should start at 0 */

  if (periodicBiogeochemForcing) {
    /* Interpolate all periodic forcing fields to the current time tc.
       Fix: the return codes of these calls were previously assigned to ierr
       but never checked, silently swallowing any I/O/interpolation error. */
    ierr = interpPeriodicVector(tc,&Ts,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&Tsp,"Ts_");CHKERRQ(ierr);
    ierr = interpPeriodicVector(tc,&Ss,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&Ssp,"Ss_");CHKERRQ(ierr);
#ifdef READ_SWRAD
    ierr = interpPeriodicProfileSurfaceScalarData(tc,localswrad,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&localswradp,"swrad_");CHKERRQ(ierr);
#else
    /* No short-wave radiation file: compute insolation analytically from latitude and time. */
    insolation_(&lNumProfiles,&myTime,&locallatitude[0],&localswrad[0],&localtau[0]);
#endif
    ierr = interpPeriodicProfileSurfaceScalarData(tc,localfice,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&localficep,"fice_");CHKERRQ(ierr);
    ierr = interpPeriodicProfileSurfaceScalarData(tc,localwind,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&localwindp,"wind_");CHKERRQ(ierr);
    ierr = interpPeriodicProfileSurfaceScalarData(tc,localatmosp,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&localatmospp,"atmosp_");CHKERRQ(ierr);
#ifdef CARBON
    ierr = interpPeriodicProfileSurfaceScalarData(tc,localEmP,biogeochemCyclePeriod,numBiogeochemPeriods,tdpBiogeochem,&localEmPp,"EmP_");CHKERRQ(ierr);
#endif
  }

  if (readBGCParams) {
    /* Re-read the biogeochemical parameter vector from file and push it into the
       Fortran model, then re-initialize the biogeochemistry in every profile. */
    ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,bgcParamsFile,FILE_MODE_READ,&fd);CHKERRQ(ierr);
    ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr);
    /* NOTE(review): bgcparams is re-allocated here (and in initializeExternalForcing)
       without freeing a previous allocation; repeated re-initialization leaks this
       buffer. Not freed here because it is unclear whether the Fortran side copies
       or aliases the array -- TODO confirm with mops_biogeochem_set_params_. */
    ierr = PetscMalloc(numBGCParams*sizeof(PetscScalar),&bgcparams);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fp,bgcparams,numBGCParams,NULL,PETSC_SCALAR);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
    mops_biogeochem_set_params_(&numBGCParams,&bgcparams[0]);

    for (ip=0; ip<lNumProfiles; ip++) {
      nzloc=lProfileLength[ip];  /* number of vertical levels in this profile */
      kl=lStartIndices[ip];      /* start index of this profile in the local arrays */
      mops_biogeochem_ini_(&nzloc,&DeltaT,
#ifdef CARBON
                           &localph[ip],
#endif
                           &localTs[kl],&localSs[kl],&localdz[kl],&drF[0],&nzmax,&nzeuph,
                           &numBiogeochemStepsPerOceanStep,&MYFALSE);
    }
  }

  return 0;
}
<reponame>hugbed/OpenS3D // Copyright 2012 The Chromium Authors. All rights reserved. // Inspired by Chromium video capture interface // Simplified and stripped from internal base code #include "s3d/video/video_frame.h" namespace s3d { // static size_t VideoFrame::AllocationSize(VideoPixelFormat format, const Size& size) { return NumBytesPerPixel(format) * size.getWidth() * size.getHeight(); } // static size_t VideoFrame::NumBytesPerPixel(VideoPixelFormat format) { switch (format) { case VideoPixelFormat::ARGB: case VideoPixelFormat::BGRA: return 4; case VideoPixelFormat::UYVY: return 2; case VideoPixelFormat::RGB: case VideoPixelFormat::BGR: return 3; } return 0; } } // namespace s3d
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v4/services/merchant_center_link_service.proto package com.google.ads.googleads.v4.services; public final class MerchantCenterLinkServiceProto { private MerchantCenterLinkServiceProto() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v4_services_GetMerchantCenterLinkRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v4_services_GetMerchantCenterLinkRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v4_services_MerchantCenterLinkOperation_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v4_services_MerchantCenterLinkOperation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResult_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResult_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\nCgoogle/ads/googleads/v4/services/merch" + "ant_center_link_service.proto\022 google.ad" + "s.googleads.v4.services\032<google/ads/goog" + "leads/v4/resources/merchant_center_link." 
+ "proto\032\034google/api/annotations.proto\032\027goo" + "gle/api/client.proto\032\037google/api/field_b" + "ehavior.proto\032\031google/api/resource.proto" + "\032 google/protobuf/field_mask.proto\":\n\036Li" + "stMerchantCenterLinksRequest\022\030\n\013customer" + "_id\030\001 \001(\tB\003\340A\002\"w\n\037ListMerchantCenterLink" + "sResponse\022T\n\025merchant_center_links\030\001 \003(\013" + "25.google.ads.googleads.v4.resources.Mer" + "chantCenterLink\"j\n\034GetMerchantCenterLink" + "Request\022J\n\rresource_name\030\001 \001(\tB3\340A\002\372A-\n+" + "googleads.googleapis.com/MerchantCenterL" + "ink\"\222\001\n\037MutateMerchantCenterLinkRequest\022" + "\030\n\013customer_id\030\001 \001(\tB\003\340A\002\022U\n\toperation\030\002" + " \001(\0132=.google.ads.googleads.v4.services." + "MerchantCenterLinkOperationB\003\340A\002\"\266\001\n\033Mer" + "chantCenterLinkOperation\022/\n\013update_mask\030" + "\003 \001(\0132\032.google.protobuf.FieldMask\022G\n\006upd" + "ate\030\001 \001(\01325.google.ads.googleads.v4.reso" + "urces.MerchantCenterLinkH\000\022\020\n\006remove\030\002 \001" + "(\tH\000B\013\n\toperation\"t\n MutateMerchantCente" + "rLinkResponse\022P\n\006result\030\002 \001(\0132@.google.a" + "ds.googleads.v4.services.MutateMerchantC" + "enterLinkResult\"7\n\036MutateMerchantCenterL" + "inkResult\022\025\n\rresource_name\030\001 \001(\t2\203\006\n\031Mer" + "chantCenterLinkService\022\347\001\n\027ListMerchantC" + "enterLinks\022@.google.ads.googleads.v4.ser" + "vices.ListMerchantCenterLinksRequest\032A.g" + "oogle.ads.googleads.v4.services.ListMerc" + "hantCenterLinksResponse\"G\202\323\344\223\0023\0221/v4/cus" + "tomers/{customer_id=*}/merchantCenterLin" + "ks\332A\013customer_id\022\335\001\n\025GetMerchantCenterLi" + "nk\022>.google.ads.googleads.v4.services.Ge" + "tMerchantCenterLinkRequest\0325.google.ads." 
+ "googleads.v4.resources.MerchantCenterLin" + "k\"M\202\323\344\223\0027\0225/v4/{resource_name=customers/" + "*/merchantCenterLinks/*}\332A\rresource_name" + "\022\376\001\n\030MutateMerchantCenterLink\022A.google.a" + "ds.googleads.v4.services.MutateMerchantC" + "enterLinkRequest\032B.google.ads.googleads." + "v4.services.MutateMerchantCenterLinkResp" + "onse\"[\202\323\344\223\002=\"8/v4/customers/{customer_id" + "=*}/merchantCenterLinks:mutate:\001*\332A\025cust" + "omer_id,operation\032\033\312A\030googleads.googleap" + "is.comB\205\002\n$com.google.ads.googleads.v4.s" + "ervicesB\036MerchantCenterLinkServiceProtoP" + "\001ZHgoogle.golang.org/genproto/googleapis" + "/ads/googleads/v4/services;services\242\002\003GA" + "A\252\002 Google.Ads.GoogleAds.V4.Services\312\002 G" + "oogle\\Ads\\GoogleAds\\V4\\Services\352\002$Google" + "::Ads::GoogleAds::V4::Servicesb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.ads.googleads.v4.resources.MerchantCenterLinkProto.getDescriptor(), com.google.api.AnnotationsProto.getDescriptor(), com.google.api.ClientProto.getDescriptor(), com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.protobuf.FieldMaskProto.getDescriptor(), }); internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksRequest_descriptor, new java.lang.String[] { "CustomerId", }); internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksResponse_descriptor = getDescriptor().getMessageTypes().get(1); 
internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v4_services_ListMerchantCenterLinksResponse_descriptor, new java.lang.String[] { "MerchantCenterLinks", }); internal_static_google_ads_googleads_v4_services_GetMerchantCenterLinkRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_ads_googleads_v4_services_GetMerchantCenterLinkRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v4_services_GetMerchantCenterLinkRequest_descriptor, new java.lang.String[] { "ResourceName", }); internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkRequest_descriptor, new java.lang.String[] { "CustomerId", "Operation", }); internal_static_google_ads_googleads_v4_services_MerchantCenterLinkOperation_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_ads_googleads_v4_services_MerchantCenterLinkOperation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v4_services_MerchantCenterLinkOperation_descriptor, new java.lang.String[] { "UpdateMask", "Update", "Remove", "Operation", }); internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResponse_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResponse_descriptor, new java.lang.String[] { "Result", }); internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResult_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResult_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v4_services_MutateMerchantCenterLinkResult_descriptor, new java.lang.String[] { "ResourceName", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.ClientProto.defaultHost); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.AnnotationsProto.http); registry.add(com.google.api.ClientProto.methodSignature); registry.add(com.google.api.ResourceProto.resourceReference); com.google.protobuf.Descriptors.FileDescriptor .internalUpdateFileDescriptor(descriptor, registry); com.google.ads.googleads.v4.resources.MerchantCenterLinkProto.getDescriptor(); com.google.api.AnnotationsProto.getDescriptor(); com.google.api.ClientProto.getDescriptor(); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.protobuf.FieldMaskProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
def save_plots(ds: tf.data.Dataset, num_images: int) -> None:
    """Render the first ``num_images`` samples of ``ds`` with their bounding
    boxes and labels drawn on, saving each to ``figure_<i>.png`` in the
    current working directory.

    Args:
        ds: Dataset yielding ``(image, bboxes, labels)`` tuples of tensors.
            NOTE(review): boxes are assumed to be ``[x_min, y_min, width,
            height]`` in pixel coordinates — that is what the drawing code
            below implements; confirm against the dataset builder.
        num_images: Number of samples to draw and save.
    """
    for i, sample in enumerate(ds.take(num_images)):
        image, bboxes, labels = [tensor.numpy() for tensor in sample]
        image = image.astype(np.int32)
        for bbox, label in zip(bboxes, labels):
            x_min, y_min, width, height = [int(v) for v in bbox]
            # NOTE(review): (0, 0, 255) is red in OpenCV's BGR channel order
            # but renders blue under plt.imshow's RGB interpretation — verify
            # the intended color/channel order.
            cv2.rectangle(
                image, (x_min, y_min), (x_min + width, y_min + height),
                color=(0, 0, 255), thickness=2
            )
            cv2.putText(
                image, str(int(label)), (x_min, y_min - 5),
                cv2.FONT_HERSHEY_SIMPLEX, fontScale=0.7,
                color=(0, 0, 255), thickness=2
            )
        fig, ax = plt.subplots()
        ax.imshow(image)
        ax.set_axis_off()
        fig.set_tight_layout(True)
        fig.savefig(f"figure_{i}.png")
        # Close the figure after saving; otherwise each call leaks an open
        # figure and matplotlib starts warning once ~20 accumulate.
        plt.close(fig)
<gh_stars>10-100 import { Key, useState } from 'react'; import { cleanup, fireEvent, render, TestRenderer, act, } from '../../__test-utils__'; import { describeForwardRefToHTMLElement, describeHostElementClassNameAppendable, } from '../../__test-utils__/common'; import Tabs, { Tab, TabPane } from '.'; describe('<Tabs />', () => { afterEach(cleanup); describeForwardRefToHTMLElement( HTMLDivElement, (ref) => render( <Tabs ref={ref}> <TabPane tab={(<Tab>tab</Tab>)}> tabPane </TabPane> </Tabs>, ), ); describeHostElementClassNameAppendable( 'foo', (className) => render( <Tabs className={className}> <TabPane tab={(<Tab>tab</Tab>)}> tabPane </TabPane> </Tabs>, ), ); it('should bind host class', () => { const { getHostHTMLElement } = render( <Tabs> <TabPane tab={(<Tab>tab</Tab>)}> tabPane </TabPane> </Tabs>, ); const element = getHostHTMLElement(); expect(element.classList.contains('mzn-tabs')).toBeTruthy(); }); describe('element structure', () => { it('should extract tabs of tab panes to tab bar and render active pane', () => { const { getHostHTMLElement } = render( <Tabs> <TabPane tab={(<Tab>tab0</Tab>)}>tabPane0</TabPane> <TabPane tab={(<Tab>tab1</Tab>)}>tabPane1</TabPane> </Tabs>, ); const element = getHostHTMLElement(); const { firstElementChild: tabBarElement, lastElementChild: tabPaneElement, childElementCount, } = element; const { firstElementChild: tabsOverflowElement } = tabBarElement!; const { firstElementChild: tabsElement } = tabsOverflowElement!; expect(childElementCount).toBe(2); expect(tabsElement!.classList.contains('mzn-tabs__tabs')).toBeTruthy(); [...tabsElement!.children].forEach((child, index) => { expect(child.classList.contains('mzn-tabs__tab')).toBeTruthy(); expect(child.textContent).toBe(`tab${index}`); expect(child.textContent).toBe(`tab${index}`); }); expect(tabPaneElement!.classList.contains('mzn-tabs__pane')).toBeTruthy(); expect(tabPaneElement!.textContent).toBe('tabPane0'); }); describe('tab bar', () => { describe('prop: tabBarClassName', () 
=> { it('should wrapped tab bar by overflow wrapper', () => { const { getHostHTMLElement } = render( <Tabs> <TabPane tab={(<Tab>tab</Tab>)}> tabPane </TabPane> </Tabs>, ); const element = getHostHTMLElement(); const { firstElementChild: tabBarElement } = element; const { firstElementChild: tabsOverflowElement } = tabBarElement!; expect(tabsOverflowElement!.classList.contains('mzn-tabs--overflow')).toBeTruthy(); }); it('should append tabBarClassName to className of tab bar', () => { const { getHostHTMLElement } = render( <Tabs tabBarClassName="foo"> <TabPane tab={(<Tab>tab</Tab>)}> tabPane </TabPane> </Tabs>, ); const element = getHostHTMLElement(); const { firstElementChild: tabBarElement } = element; expect(tabBarElement!.classList.contains('foo')).toBeTruthy(); }); }); }); }); describe('overflow', () => { Object.defineProperty(HTMLDivElement.prototype, 'scrollWidth', { configurable: true, value: 400 }); Object.defineProperty(HTMLDivElement.prototype, 'clientWidth', { configurable: true, value: 200 }); it('should render button on the right when overflow', () => { const { getHostHTMLElement } = render( <div style={{ width: 200 }}> <Tabs> {Array.from('ABCDEFG').map((tab) => ( <TabPane key={tab} tab={(<Tab>{tab}</Tab>)} > {tab} </TabPane> ))} </Tabs> </div>, ); const element = getHostHTMLElement(); const tabsOverflowElement = element.querySelector('.mzn-tabs--overflow'); const { lastElementChild: rightBtnElement } = tabsOverflowElement!; expect(rightBtnElement!.tagName.toLowerCase()).toBe('button'); expect(rightBtnElement!.getAttribute('aria-label')).toBe('scrollToRight'); expect(rightBtnElement!.classList.contains('mzn-tabs__scroll-btn')).toBeTruthy(); }); it('should render button on the left if scroll the tabs', () => { const { getHostHTMLElement } = render( <div style={{ width: 200 }}> <Tabs> {Array.from('ABCD').map((tab) => ( <TabPane key={tab} tab={(<Tab>{tab}</Tab>)} > {tab} </TabPane> ))} </Tabs> </div>, ); const element = getHostHTMLElement(); const 
tabsOverflowElement = element.querySelector('.mzn-tabs--overflow'); const tabsElement = element.querySelector('.mzn-tabs__tabs'); act(() => { tabsElement!.scrollLeft = 10; tabsElement!.dispatchEvent(new window.Event('scroll')); }); const { firstElementChild } = tabsOverflowElement!; expect(firstElementChild!.tagName.toLowerCase()).toBe('button'); expect(firstElementChild!.getAttribute('aria-label')).toBe('scrollToLeft'); expect(firstElementChild!.classList.contains('mzn-tabs__scroll-btn')).toBeTruthy(); }); it('should scroll tabs to right when click', () => { const { getHostHTMLElement } = render( <div style={{ width: 200 }}> <Tabs> {Array.from('ABCDEFG').map((tab) => ( <TabPane key={tab} tab={(<Tab>{tab}</Tab>)} > {tab} </TabPane> ))} </Tabs> </div>, ); const element = getHostHTMLElement(); const tabsOverflowElement = element.querySelector('.mzn-tabs--overflow'); const tabsElement = element.querySelector('.mzn-tabs__tabs'); const { lastElementChild: rightBtnElement } = tabsOverflowElement!; const scrollEnd = tabsElement!.scrollWidth - tabsElement!.clientWidth; tabsElement!.scrollTo = jest.fn(); fireEvent.click(rightBtnElement!); act(() => { tabsElement!.scrollLeft = scrollEnd; tabsElement!.dispatchEvent(new window.Event('scroll')); }); expect(tabsElement!.scrollTo).toBeCalled(); expect(tabsElement!.scrollLeft).toBe(scrollEnd); }); it('should scroll tabs to left when click', () => { const { getHostHTMLElement } = render( <div style={{ width: 200 }}> <Tabs> {Array.from('ABCD').map((tab) => ( <TabPane key={tab} tab={(<Tab>{tab}</Tab>)} > {tab} </TabPane> ))} </Tabs> </div>, ); const element = getHostHTMLElement(); const tabsOverflowElement = element.querySelector('.mzn-tabs--overflow'); const tabsElement = element.querySelector('.mzn-tabs__tabs'); tabsElement!.scrollTo = jest.fn(); act(() => { tabsElement!.scrollLeft = 10; tabsElement!.dispatchEvent(new window.Event('scroll')); }); const { firstElementChild: leftBtnElement } = tabsOverflowElement!; 
fireEvent.click(leftBtnElement!); act(() => { tabsElement!.scrollLeft = 0; tabsElement!.dispatchEvent(new window.Event('scroll')); }); expect(tabsElement!.scrollTo).toBeCalled(); expect(tabsElement!.scrollLeft).toBe(0); }); }); describe('prop: onTabClick', () => { it('should be fired w/ key while some tab clicked', () => { const onTabClick = jest.fn(); const { getHostHTMLElement } = render( <Tabs onTabClick={onTabClick}> <TabPane key="foo" tab={(<Tab>tab1</Tab>)}> tabPane1 </TabPane> </Tabs>, ); const element = getHostHTMLElement(); const tabsElement = element.querySelector('.mzn-tabs__tabs'); const { firstElementChild: tabElement } = tabsElement!; fireEvent.click(tabElement!); expect(onTabClick).toBeCalledTimes(1); expect(onTabClick.mock.calls[0][0]).toBe('foo'); }); }); describe('prop: actions', () => { it('should render actions on the right side of tab bar', () => { const { getHostHTMLElement } = render( <Tabs actions={( <button type="button">action</button> )} > <TabPane key="foo" tab={(<Tab>tab1</Tab>)}> tabPane1 </TabPane> </Tabs>, ); const element = getHostHTMLElement(); const { firstElementChild: tabBarElement } = element; const { lastElementChild: actionElement } = tabBarElement!; expect(actionElement!.tagName.toLowerCase()).toBe('button'); expect(actionElement!.textContent).toBe('action'); }); }); it('should provide active to tab', () => { const testInstance = TestRenderer.create( <Tabs defaultActiveKey="bar"> <TabPane key="foo" tab={(<Tab>foo</Tab>)}>foo</TabPane> <TabPane key="bar" tab={(<Tab>bar</Tab>)}>bar</TabPane> </Tabs>, ); testInstance.root.findAllByType(Tab).forEach((tab, index) => { expect(tab.props.active).toBe(index === 1); }); }); describe('control', () => { function testActiveKey(ui: JSX.Element) { const testInstance = TestRenderer.create(ui); const [inactiveTabInstance, activeTabInstance] = testInstance.root.findAllByType(Tab); expect(inactiveTabInstance.props.active).toBeFalsy(); expect(activeTabInstance.props.active).toBeTruthy(); } 
it('should activate the tab which activeKey=key of its parent tab pane', () => { testActiveKey( <Tabs activeKey="1"> <TabPane key="0" tab={(<Tab>tab1</Tab>)}> tabPane1 </TabPane> <TabPane key="1" tab={(<Tab>tab2</Tab>)}> tabPane2 </TabPane> </Tabs>, ); }); it('should activate the tab which defaultActiveKey=key of its parent tab pane if activeKey not passed', () => { testActiveKey( <Tabs defaultActiveKey="1"> <TabPane key="0" tab={(<Tab>tab1</Tab>)}> tabPane1 </TabPane> <TabPane key="1" tab={(<Tab>tab2</Tab>)}> tabPane2 </TabPane> </Tabs>, ); }); it('should fire onChange while inactive tab clicked', () => { const onChange = jest.fn(); const { getHostHTMLElement } = render( <Tabs onChange={onChange}> <TabPane tab={(<Tab>tab1</Tab>)}> tabPane1 </TabPane> <TabPane tab={(<Tab>tab2</Tab>)}> tabPane2 </TabPane> </Tabs>, ); const element = getHostHTMLElement(); const tabsElement = element.querySelector('.mzn-tabs__tabs'); const { lastElementChild: inactiveTabElement } = tabsElement!; fireEvent.click(inactiveTabElement!); expect(onChange).toBeCalledTimes(1); expect(onChange.mock.calls[0][0]).toBe(1); }); function testControlled(ui: JSX.Element) { const { getHostHTMLElement } = render(ui); const element = getHostHTMLElement(); const tabsElement = element.querySelector('.mzn-tabs__tabs'); const { firstElementChild: activeTabElement, lastElementChild: inactiveTabElement } = tabsElement!; let tabPaneElement = element.lastElementChild!; expect(activeTabElement!.classList.contains('mzn-tabs__tab--active')).toBeTruthy(); expect(inactiveTabElement!.classList.contains('mzn-tabs__tab--active')).toBeFalsy(); expect(tabPaneElement.textContent).toBe('tabPane1'); fireEvent.click(inactiveTabElement!); tabPaneElement = element.lastElementChild!; expect(activeTabElement!.classList.contains('mzn-tabs__tab--active')).toBeFalsy(); expect(inactiveTabElement!.classList.contains('mzn-tabs__tab--active')).toBeTruthy(); expect(tabPaneElement.textContent).toBe('tabPane2'); } it('uncontrolled', () => 
{ testControlled( <Tabs> <TabPane tab={(<Tab>tab1</Tab>)}> tabPane1 </TabPane> <TabPane tab={(<Tab>tab2</Tab>)}> tabPane2 </TabPane> </Tabs>, ); }); it('controlled', () => { const ControlledTabs = () => { const [tabKey, setTabKey] = useState<Key>('1'); return ( <Tabs activeKey={tabKey} onChange={setTabKey} > <TabPane key="1" tab={(<Tab>tab1</Tab>)}> tabPane1 </TabPane> <TabPane key="2" tab={(<Tab>tab2</Tab>)}> tabPane2 </TabPane> </Tabs> ); }; testControlled(<ControlledTabs />); }); }); });
// create a list of prediction inputs from double matrix private List<PredictionInput> createPIFromMatrix(double[][] m) { List<PredictionInput> pis = new ArrayList<>(); int[] shape = MatrixUtilsExtensions.getShape(m); for (int i = 0; i < shape[0]; i++) { List<Feature> fs = new ArrayList<>(); for (int j = 0; j < shape[1]; j++) { fs.add(FeatureFactory.newNumericalFeature("f", m[i][j])); } pis.add(new PredictionInput(fs)); } return pis; }
import pandas as pd
import os

from ..utils import NoCloudtrailException

from isitfit.utils import logger

from isitfit.cost.cloudtrail_iterator import dict2service


class CloudtrailCached:
    """Pipeline listener that loads (cached) CloudTrail EC2 type-change
    events for all instances up front, then attaches each instance's own
    history to its per-instance context in `single`.
    """

    def __init__(self, EndTime, cache_man, tqdmman):
        """
        EndTime -- upper bound timestamp for the CloudTrail event query
        cache_man -- cache manager forwarded to the event aggregator
        tqdmman -- progress-bar manager forwarded to the event aggregator
        """
        self.EndTime = EndTime
        self.tqdmman = tqdmman
        self.cache_man = cache_man

    def init_data(self, context_pre):
        """Fetch CloudTrail EC2 type changes for all instances and store the
        result on self.df_cloudtrail. Returns context_pre unchanged.
        """
        # parse out of context
        ec2_instances = context_pre['ec2_instances']
        region_include = context_pre['region_include']
        n_ec2 = context_pre['n_ec2_total']

        # get cloudtrail ec2 type changes for all instances
        # (import here matches the original's deferred-import style)
        from isitfit.cost.cloudtrail_iterator import EventAggregatorPostprocessed
        eap = EventAggregatorPostprocessed(region_include, self.tqdmman, self.cache_man, self.EndTime)
        self.df_cloudtrail = eap.get(ec2_instances, n_ec2)

        # done
        return context_pre

    def single(self, context_ec2):
        """Attach the CloudTrail type-change history of one instance as
        context_ec2['df_type_ts1'] and return the context.

        Raises NoCloudtrailException when no data exists for the instance's
        region, service, or id.
        """
        # Bug fix: ec2_id must be read before the raise paths below; it was
        # previously assigned only after them, so those paths raised a
        # NameError instead of the intended NoCloudtrailException.
        ec2_id = context_ec2['ec2_id']

        ec2_dict = context_ec2['ec2_dict']

        # imply service name
        ec2_dict['ServiceName'] = dict2service(ec2_dict)
        ServiceName = ec2_dict['ServiceName']
        region_name = ec2_dict['Region']

        # Narrow the pre-fetched dataframe down: region -> service -> instance.
        # NOTE(review): .loc on a missing region/service raises KeyError here
        # rather than NoCloudtrailException — confirm callers expect that.
        sub_ct = self.df_cloudtrail
        sub_ct = sub_ct.loc[region_name]
        if sub_ct.shape[0] == 0:
            raise NoCloudtrailException("No cloudtrail data #4 for %s" % ec2_id)

        sub_ct = sub_ct.loc[ServiceName]
        if sub_ct.shape[0] == 0:
            raise NoCloudtrailException("No cloudtrail data #3 for %s" % ec2_id)

        # pandas series of number of cpu's available on the machine over time, past 90 days
        if not ec2_id in sub_ct.index:
            raise NoCloudtrailException("No cloudtrail data #1 for %s" % ec2_id)

        df_type_ts1 = sub_ct.loc[ec2_id]
        if df_type_ts1 is None:
            raise NoCloudtrailException("No cloudtrail data #2 for %s" % ec2_id)

        # set in context
        context_ec2['df_type_ts1'] = df_type_ts1

        # done
        return context_ec2