Lossless image compression based on recursive nonlinear interpolation
The generalized recursive interpolation (GRINT) algorithm was recently proposed and shown to be the most effective progressive technique for the decorrelation of still images. A nonlinear version of GRINT (MRINT) employs median filtering in a nonseparable fashion on a quincunx grid. The main advantage of both schemes is that interpolation is performed from all error-free values, thereby reducing the variance of the interpolation errors. MRINT is embedded in a simplified version of the context-based encoder by Said and Pearlman. The coding performance of the novel context-based coder is evaluated through comparisons with GRINT and a variety of other multiresolution lossless methods, including the original scheme by Said and Pearlman. The modified scheme outperforms all the other algorithms, including the latter, especially when dealing with medical images.
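As an illustration only, here is a minimal Python/NumPy sketch of median-based interpolation of one quincunx coset from its four horizontal/vertical neighbours. The function name, coset convention and border handling are assumptions made for this example; it is not the published GRINT/MRINT procedure.

import numpy as np

def median_interpolate_quincunx(img):
    # Toy analogue of nonlinear interpolation on a quincunx grid:
    # predict every pixel of the "odd" coset ((i + j) odd) as the median of
    # its four horizontal/vertical neighbours, all of which lie on the
    # "even" coset. Borders are handled crudely by edge replication.
    h, w = img.shape
    out = img.astype(float).copy()
    padded = np.pad(out, 1, mode='edge')
    for i in range(h):
        for j in range(w):
            if (i + j) % 2 == 1:
                neighbours = [padded[i, j + 1], padded[i + 2, j + 1],
                              padded[i + 1, j], padded[i + 1, j + 2]]
                out[i, j] = np.median(neighbours)
    return out

# The interpolation error (residual) on the predicted coset would then be
# img - median_interpolate_quincunx(img), restricted to pixels with (i + j) odd.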
/** * Provides token credentials to the client. * * @param req The HTTP servlet request * @param res The HTTP servlet response. * * @throws OAuthException Should any OAuth related problem occur. * @throws IOException Should an error occur while writing the output stream. */ private static final void doProvideTokenCredentials(HttpServletRequest req, HttpServletResponse res) throws OAuthException, IOException { OAuthConfig configuration = OAuthConfig.getInstance(); OAuthRequest message = OAuthRequest.parse(req); message.checkRequired(OAuthParameter.TOKEN_CREDENTIALS_REQUIRED); String key = message.getOAuthParameter(OAuthParameter.oauth_consumer_key); OAuthClient client = configuration.manager().getByKey(key); if (client == null) throw new OAuthException(OAuthProblem.consumer_key_unknown); String method = message.getOAuthParameter(OAuthParameter.oauth_signature_method); String signature = message.getOAuthParameter(OAuthParameter.oauth_signature); String token = message.getOAuthParameter(OAuthParameter.oauth_token); OAuthTemporaryToken temporary = OAuthTokens.getTemporary(token); if (temporary == null) throw new OAuthException(OAuthProblem.token_rejected); if (temporary.hasExpired()) throw new OAuthException(OAuthProblem.token_expired); if (temporary.isUsed()) throw new OAuthException(OAuthProblem.token_used); temporary.marksAsUsed(); String baseString = message.toSignatureBaseString(); OAuthSigner signer = OAuthSignatures.newSigner(method); String signatureCheck = signer.getSignature(baseString, client.getCredentials().secret(), temporary.credentials().secret()); if (!Strings.equals(signature, signatureCheck)) throw new OAuthException(OAuthProblem.signature_invalid); OAuthAccessToken access = configuration.factory().newToken(client); configuration.listener().token(temporary, access, req); res.setContentType("application/x-www-form-urlencoded"); PrintWriter out = res.getWriter(); out.print("oauth_token=" + access.credentials().identifier() + "&oauth_token_secret="+ access.credentials().secret()); out.flush(); }
package e2e import ( "bytes" _ "embed" "errors" "fmt" "html/template" "math/rand" "os" "os/exec" "path/filepath" "sync" "testing" "time" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" "sigs.k8s.io/yaml" ) var ( binDir string failedTest bool testNamespace string = "storage-capacity-prioritization-scheduler-test" //go:embed testdata/pod-pvc-template.yaml podPVCTemplateYAML string podPVCTemplateOnce sync.Once podPVCTmpl *template.Template ) func execAtLocal(cmd string, input []byte, args ...string) ([]byte, []byte, error) { var stdout, stderr bytes.Buffer command := exec.Command(cmd, args...) command.Stdout = &stdout command.Stderr = &stderr if len(input) != 0 { command.Stdin = bytes.NewReader(input) } err := command.Run() return stdout.Bytes(), stderr.Bytes(), err } func kubectl(args ...string) ([]byte, []byte, error) { return execAtLocal(filepath.Join(binDir, "kubectl"), nil, args...) } func kubectlWithInput(input []byte, args ...string) ([]byte, []byte, error) { return execAtLocal(filepath.Join(binDir, "kubectl"), input, args...) } func TestMtest(t *testing.T) { if os.Getenv("E2ETEST") == "" { t.Skip("Run under e2e/") return } rand.Seed(time.Now().UnixNano()) RegisterFailHandler(Fail) SetDefaultEventuallyPollingInterval(time.Second) SetDefaultEventuallyTimeout(5 * time.Minute) RunSpecs(t, "Test on sanity") } func createNamespace(ns string) { stdout, stderr, err := kubectl("create", "namespace", ns) Expect(err).ShouldNot(HaveOccurred(), "stdout=%s, stderr=%s", stdout, stderr) Eventually(func() error { return waitCreatingDefaultSA(ns) }).Should(Succeed()) } func waitCreatingDefaultSA(ns string) error { stdout, stderr, err := kubectl("get", "sa", "-n", ns, "default") if err != nil { return fmt.Errorf("default sa is not found. 
stdout=%s, stderr=%s, err=%v", stdout, stderr, err) } return nil } var _ = BeforeSuite(func() { By("[BeforeSuite] Getting the directory path which contains some binaries") binDir = os.Getenv("BINDIR") Expect(binDir).ShouldNot(BeEmpty()) fmt.Println("This test uses the binaries under " + binDir) By("[BeforeSuite] Waiting for storage-capacity-prioritization-scheduler to get ready") Eventually(func() error { stdout, stderr, err := kubectl("-n", "storage-capacity-prioritization-scheduler", "get", "deploy", "storage-capacity-prioritization-scheduler", "-o", "json") if err != nil { return errors.New(string(stderr)) } var deploy appsv1.Deployment err = yaml.Unmarshal(stdout, &deploy) if err != nil { return err } if deploy.Status.AvailableReplicas != 1 { return errors.New("storage-capacity-prioritization-scheduler is not available yet") } return nil }).Should(Succeed()) By("[BeforeSuite] Creating namespace for test") createNamespace(testNamespace) }) var _ = AfterSuite(func() { if !failedTest { By("[AfterSuite] Delete namespace for autoresizer tests") stdout, stderr, err := kubectl("delete", "namespace", testNamespace) Expect(err).ShouldNot(HaveOccurred(), "stdout=%s, stderr=%s", stdout, stderr) } }) type resource struct { resource string name string } var _ = Describe("storage_capacity_prioritization", func() { var resources []resource var _ = AfterEach(func() { By("[AfterEach] cleanup resources") for _, r := range resources { stdout, stderr, err := kubectl("-n", testNamespace, "delete", r.resource, r.name) Expect(err).ShouldNot(HaveOccurred(), "stdout=%s, stderr=%s", stdout, stderr) } }) It("should create a Pod with a PVC", func() { pvcName := "test-pvc" sc := "topolvm-provisioner" mode := string(corev1.PersistentVolumeFilesystem) request := "1Gi" limit := "2Gi" resources = createPodPVC(resources, pvcName, sc, mode, pvcName, request, limit) }) }) func createPodPVC(resources []resource, pvcName, storageClassName, volumeMode, podName, request, limit string) []resource { By("create a PVC and a pod for test") podPVCYAML, err := buildPodPVCTemplateYAML(pvcName, storageClassName, volumeMode, pvcName, request, limit) Expect(err).ShouldNot(HaveOccurred()) stdout, stderr, err := kubectlWithInput(podPVCYAML, "apply", "-f", "-") Expect(err).ShouldNot(HaveOccurred(), "stdout=%s, stderr=%s yaml=\n%s", stdout, stderr, podPVCYAML) resources = append(resources, resource{resource: "pod", name: pvcName}) resources = append(resources, resource{resource: "pvc", name: pvcName}) By("waiting for creating the volume and running the pod") Eventually(func() error { stdout, stderr, err := kubectl("get", "-n", testNamespace, "pod", pvcName, "-o", "yaml") if err != nil { return fmt.Errorf("failed to get pod name of %s/%s. 
stdout: %s, stderr: %s, err: %v", testNamespace, pvcName, stdout, stderr, err) } var pod corev1.Pod err = yaml.Unmarshal(stdout, &pod) if err != nil { return err } if pod.Status.Phase != corev1.PodRunning { return errors.New("Pod is not running") } return nil }).Should(Succeed()) return resources } func buildPodPVCTemplateYAML(pvcName, storageClassName, volumeMode, podName, request, limit string) ([]byte, error) { var b bytes.Buffer var err error podPVCTemplateOnce.Do(func() { podPVCTmpl, err = template.New("").Parse(podPVCTemplateYAML) }) if err != nil { return b.Bytes(), err } params := map[string]string{ "pvcName": pvcName, "storageClassName": storageClassName, "volumeMode": volumeMode, "podName": podName, "namespace": testNamespace, "resourceRequest": request, "resourceLimit": limit, } err = podPVCTmpl.Execute(&b, params) return b.Bytes(), err }
<filename>Encoder/IntraPredictor.cpp #include "IntraPredictor.h" #include <math.h> #include <stdio.h> #include <stdlib.h> #include <iostream> #include "Config.h" #define _CRTDBG_MAP_ALLOC #include <stdlib.h> #include <crtdbg.h> #define CALC_DIFF(x, y) ( x - y ) * ( x - y ) //#define CALC_DIFF(x, y) abs( x - y ) #define DEFAULT 128 #define LUM 16 #define CHROM 8 IntraPredictor::IntraPredictor() { pixels_up_luma = new pixel[16]; pixels_left_luma = new pixel[16]; pixels_up_cb = new pixel[8]; pixels_left_cb = new pixel[8]; pixels_up_cr = new pixel[8]; pixels_left_cr = new pixel[8]; } IntraPredictor::~IntraPredictor() { delete pixels_up_luma; delete pixels_left_luma; delete pixels_up_cb; delete pixels_left_cb; delete pixels_up_cr; delete pixels_left_cr; } void IntraPredictor::setCurrentFrame(Frame* frame) { current_frame = frame; } //alloceer geheugen voor de tussenwaardenmatrix void initTussMatrix(pixel** &tussw, int size) { tussw = new pixel*[size]; for (int i = 0; i < size-1; i++) { tussw[i] = new pixel[size]; } } //geef het geheugen van de tussenwaardenmatrix vrij void deleteTussMatrix(pixel** &tussw, int size) { for (int i = 0; i < size-1; i++) { delete[] tussw[i]; } delete[] tussw; } bool isSmallest(int test, int a, int b, int c) { if(test<=a && test<=b && test<=c) return true; return false; } int determineMode(int DCT, int horz, int vert, int diag) { if(isSmallest(DCT, horz, vert, diag)) return 0; if(isSmallest(horz, DCT, vert, diag)) return 1; if(isSmallest(vert, DCT, horz, diag)) return 2; if(isSmallest(diag, DCT, horz, vert)) return 3; } // Breng wijzigingen aan in onderstaande methode int IntraPredictor::predictIntra(int current_mb, int width, int height) { // get current macroblock Macroblock* mb = current_frame->getMacroblock(current_mb); // Haal de predictiepixels op uit omliggende macroblokken (links, boven, linksboven) // Indien de pixels niet beschikbaar zijn, gebruik de waarde 128 Macroblock *vert = NULL, *horz = NULL, *diag=NULL; bool vertini, horzini, diagini; //niet eerste kol vertini = false; if(current_mb%width>=0 && current_mb != 0) { vert = current_frame->getMacroblock(current_mb-1); } if(vert){ vertini = true; for (int i = 0; i < LUM-1; i++) { pixels_up_luma[i] = vert->luma[i][LUM-1]; } for (int i = 0; i < CHROM-1; i++) { pixels_up_cb[i] = vert->cb[i][CHROM-1]; pixels_up_cr[i] = vert->cr[i][CHROM-1]; } } //niet eerste rij horzini = false; if(current_mb >= width) { horz = current_frame->getMacroblock(current_mb-width); horzini = true; for (int i = 0; i < LUM-1; i++) { pixels_left_luma[i] = horz->luma[LUM-1][i]; } for (int i = 0; i < CHROM-1; i++) { pixels_left_cb[i] = vert->cb[CHROM-1][i]; pixels_left_cr[i] = vert->cr[CHROM-1][i]; } } //diag diagini = false; if((current_mb%current_frame->getWidth() > 0) && (current_mb >= width)) { diag = current_frame->getMacroblock(current_mb-width-1); diagini = true; pixel_up_left_luma = diag->luma[LUM-1][LUM-1]; pixel_up_left_cb = diag->cb[CHROM-1][CHROM-1]; pixel_up_left_cr = diag->cr[CHROM-1][CHROM-1]; } // Evalueer de verschillende predictiemodes (op basis van de luma-component) int mode = -1; int DCEn,horzEn,vertEn ,diagEn; pixel** prediction_lum = NULL; initTussMatrix(prediction_lum, LUM); //berekend de energie voor de Luma van elke predictiemehode DCEn = predDC(prediction_lum, horzini, pixels_left_luma, vertini, pixels_up_luma, LUM, true, mb->luma); horzEn = predHor(prediction_lum, horzini, pixels_left_luma, LUM, true, mb->luma); vertEn = predVer(prediction_lum, vertini, pixels_up_luma, LUM, true, mb->luma); diagEn = 
predDia(prediction_lum, horzini, pixels_left_luma, vertini, pixels_up_luma, diagini, pixel_up_left_luma, LUM, true, mb->luma); //bepaald de beste modus mode = determineMode(DCEn, horzEn, vertEn, diagEn); deleteTussMatrix(prediction_lum, LUM); // Bereken het residu voor de geselecteerde predictiemode (voor luma en chroma) pixel** prediction_res_lum = NULL; initTussMatrix(prediction_res_lum, LUM); pixel** prediction_res_cb = NULL; initTussMatrix(prediction_res_cb, CHROM); pixel** prediction_res_cr = NULL; initTussMatrix(prediction_res_cr, CHROM); //gebruikt deze mode switch (mode) { case 0: predDC(prediction_res_lum,horzini,pixels_left_luma,vertini,pixels_up_luma, LUM, false, mb->luma); predDC(prediction_res_cb,horzini,pixels_left_cb,vertini,pixels_up_cb, CHROM, false, mb->cb); predDC(prediction_res_cr,horzini,pixels_left_cr,vertini,pixels_up_cr, CHROM, false, mb->cr); break; case 1: predHor(prediction_res_lum, horzini, pixels_left_luma, LUM, false, mb->luma); predHor(prediction_res_cb, horzini, pixels_left_luma, CHROM, false, mb->cb); predHor(prediction_res_cr, horzini, pixels_left_cr, CHROM, false, mb->cr); break; case 2: predVer(prediction_res_lum, vertini, pixels_up_luma, LUM, false, mb->luma); predVer(prediction_res_cb, vertini, pixels_up_cb, CHROM, false, mb->cb); predVer(prediction_res_cr, vertini, pixels_up_cr, CHROM, false, mb->cr); break; case 3: predDia(prediction_res_lum, horzini, pixels_left_luma, vertini, pixels_up_luma, diagini, pixel_up_left_luma, LUM, false, mb->luma); predDia(prediction_res_cb, horzini, pixels_left_cb, vertini, pixels_up_cb, diagini, pixel_up_left_cb, CHROM, false, mb->cb); predDia(prediction_res_cr, horzini, pixels_left_cr, vertini, pixels_up_cr, diagini, pixel_up_left_cr, CHROM, false, mb->cr); break; default: break; } //past de predictie toe for (int i = 0; i < LUM-1; i++) { for (int j = 0; j < LUM-1; j++) { mb->luma[i][j] -= prediction_res_lum[i][j]; } } for (int i = 0; i < CHROM-1; i++) { for (int j = 0; j < CHROM-1; j++) { mb->cb[i][j] -= prediction_res_cb[i][j]; mb->cr[i][j] -= prediction_res_cr[i][j]; } } //geeft geheugen van de arrays vrij deleteTussMatrix(prediction_res_lum,LUM); deleteTussMatrix(prediction_res_cb,CHROM); deleteTussMatrix(prediction_res_cr, CHROM); return mode; // Optimale mode als return-waarde } int IntraPredictor::SSE(pixel** curr, pixel** residu, int size) { int sse = 0; for (int i = 0; i < size-1; i++) { for (int j = 0; j < size-1; j++) { int pow = curr[i][j] - residu[i][j]; sse += pow*pow; } } return sse; } int IntraPredictor::predDC(pixel** res, bool left, pixel* leftp, bool up, pixel* upp, int size, bool calc, pixel** current) { int left_energy = 0; int up_energy = 0; if(left){ for (int i = 0; i < size-1; i++) { left_energy += leftp[i]; } } else left_energy = DEFAULT; if(up){ for (int i = 0; i < size-1; i++) { up_energy += upp[i]; } } else up_energy = DEFAULT; for (int i = 0; i < size-1; i++) { for (int j = 0; j < size-1; j++) { res[i][j] = (pixel)((left_energy+up_energy)/(2*size)); } } if(calc) return(SSE(current, res, size)); return -1; } int IntraPredictor::predHor(pixel** res, bool left, pixel* leftp, int size, bool calc, pixel** current) { for (int i = 0; i < size-1; i++) { int horz; if(left) horz = leftp[i]; else horz = DEFAULT; for (int j = 0; j < size-1; j ++) { res[i][j] = horz; } } if(calc) return(SSE(current, res, size)); return -1; } int IntraPredictor::predVer(pixel** res, bool up, pixel* upp, int size, bool calc, pixel** current) { for (int i = 0; i < size-1; i++) { int vert; if(up) vert = upp[i]; else vert = 
DEFAULT; for (int j = 0; j < size-1; j++) { res[j][i] = vert; } } if(calc) return(SSE(current, res, size)); return -1; } int IntraPredictor::predDia(pixel** res, bool left, pixel* leftp, bool up, pixel* upp, bool upleft, pixel upleftp, int size, bool calc, pixel** current) { for (int i = 0; i < size-1; i++) { for (int j = 0; j < size-1; j++) { if((i<j)&&up) res[i][j] = upp[j-i-1]; else if((i>j)&&left) res[i][j] = leftp[i-j-1]; else if((i==j)&&upleft) res[i][j] = upleftp; else res[i][j] = DEFAULT; } } if(calc) return(SSE(current, res, size)); return -1; }
<filename>extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/reflection/GrpcServerIndex.java package io.quarkus.grpc.runtime.reflection; import static com.google.protobuf.Descriptors.FileDescriptor; import java.util.ArrayDeque; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.function.Function; import com.google.protobuf.Descriptors; import io.grpc.ServerServiceDefinition; import io.grpc.ServiceDescriptor; import io.grpc.protobuf.ProtoFileDescriptorSupplier; public class GrpcServerIndex { private final Set<String> names; private final Set<FileDescriptor> services; private final Map<String, FileDescriptor> descriptorsByName; private final Map<String, FileDescriptor> descriptorsBySymbol; private final Map<String, Map<Integer, FileDescriptor>> descriptorsByExtensionAndNumber; public GrpcServerIndex( List<ServerServiceDefinition> definitions) { Queue<FileDescriptor> fileDescriptorsToProcess = new ArrayDeque<>(); Set<String> files = new HashSet<>(); Set<String> names = new HashSet<>(); Set<FileDescriptor> services = new HashSet<>(); Map<String, FileDescriptor> descriptorsByName = new HashMap<>(); Map<String, FileDescriptor> descriptorsBySymbol = new HashMap<>(); Map<String, Map<Integer, FileDescriptor>> descriptorsByExtensionAndNumber = new HashMap<>(); // Collect the services for (ServerServiceDefinition definition : definitions) { ServiceDescriptor serviceDescriptor = definition.getServiceDescriptor(); if (serviceDescriptor.getSchemaDescriptor() instanceof ProtoFileDescriptorSupplier) { ProtoFileDescriptorSupplier supplier = (ProtoFileDescriptorSupplier) serviceDescriptor .getSchemaDescriptor(); FileDescriptor fd = supplier.getFileDescriptor(); String serviceName = serviceDescriptor.getName(); if (names.contains(serviceName)) { throw new IllegalStateException("Duplicated gRPC service: " + serviceName); } services.add(fd); names.add(serviceName); if (!files.contains(fd.getName())) { files.add(fd.getName()); fileDescriptorsToProcess.add(fd); } } } // Traverse the set of service and add dependencies while (!fileDescriptorsToProcess.isEmpty()) { FileDescriptor fd = fileDescriptorsToProcess.remove(); processFileDescriptor(fd, descriptorsByName, descriptorsBySymbol, descriptorsByExtensionAndNumber); for (FileDescriptor dep : fd.getDependencies()) { if (!files.contains(dep.getName())) { files.add(dep.getName()); fileDescriptorsToProcess.add(dep); } } } this.services = Collections.unmodifiableSet(services); this.descriptorsByName = Collections.unmodifiableMap(descriptorsByName); this.descriptorsByExtensionAndNumber = Collections.unmodifiableMap(descriptorsByExtensionAndNumber); this.descriptorsBySymbol = Collections.unmodifiableMap(descriptorsBySymbol); this.names = Collections.unmodifiableSet(names); } public Set<String> getServiceNames() { return names; } public FileDescriptor getFileDescriptorByName(String name) { return descriptorsByName.get(name); } public FileDescriptor getFileDescriptorBySymbol(String symbol) { return descriptorsBySymbol.get(symbol); } public FileDescriptor getFileDescriptorByExtensionAndNumber(String type, int number) { Map<Integer, FileDescriptor> map = descriptorsByExtensionAndNumber .getOrDefault(type, Collections.emptyMap()); return map.get(number); } public Set<Integer> getExtensionNumbersOfType(String type) { return descriptorsByExtensionAndNumber.getOrDefault(type, 
Collections.emptyMap()).keySet(); } private void processFileDescriptor(FileDescriptor fd, Map<String, FileDescriptor> descriptorsByName, Map<String, FileDescriptor> descriptorsBySymbol, Map<String, Map<Integer, FileDescriptor>> descriptorsByExtensionAndNumber) { String name = fd.getName(); if (descriptorsByName.containsKey(name)) { throw new IllegalStateException("File name already used: " + name); } descriptorsByName.put(name, fd); for (Descriptors.ServiceDescriptor service : fd.getServices()) { processService(service, fd, descriptorsBySymbol); } for (Descriptors.Descriptor type : fd.getMessageTypes()) { processType(type, fd, descriptorsBySymbol, descriptorsByExtensionAndNumber); } for (Descriptors.FieldDescriptor extension : fd.getExtensions()) { processExtension(extension, fd, descriptorsByExtensionAndNumber); } } private void processService(Descriptors.ServiceDescriptor service, FileDescriptor fd, Map<String, FileDescriptor> descriptorsBySymbol) { String fullyQualifiedServiceName = service.getFullName(); if (descriptorsBySymbol.containsKey(fullyQualifiedServiceName)) { throw new IllegalStateException("Service already defined: " + fullyQualifiedServiceName); } descriptorsBySymbol.put(fullyQualifiedServiceName, fd); for (Descriptors.MethodDescriptor method : service.getMethods()) { String fullyQualifiedMethodName = method.getFullName(); if (descriptorsBySymbol.containsKey(fullyQualifiedMethodName)) { throw new IllegalStateException( "Method already defined: " + fullyQualifiedMethodName + " in " + fullyQualifiedServiceName); } descriptorsBySymbol.put(fullyQualifiedMethodName, fd); } } private void processType(Descriptors.Descriptor type, FileDescriptor fd, Map<String, FileDescriptor> descriptorsBySymbol, Map<String, Map<Integer, FileDescriptor>> descriptorsByExtensionAndNumber) { String fullyQualifiedTypeName = type.getFullName(); if (descriptorsBySymbol.containsKey(fullyQualifiedTypeName)) { throw new IllegalStateException("Type already defined: " + fullyQualifiedTypeName); } descriptorsBySymbol.put(fullyQualifiedTypeName, fd); for (Descriptors.FieldDescriptor extension : type.getExtensions()) { processExtension(extension, fd, descriptorsByExtensionAndNumber); } for (Descriptors.Descriptor nestedType : type.getNestedTypes()) { processType(nestedType, fd, descriptorsBySymbol, descriptorsByExtensionAndNumber); } } private void processExtension(Descriptors.FieldDescriptor extension, FileDescriptor fd, Map<String, Map<Integer, FileDescriptor>> descriptorsByExtensionAndNumber) { String extensionName = extension.getContainingType().getFullName(); int extensionNumber = extension.getNumber(); descriptorsByExtensionAndNumber.computeIfAbsent(extensionName, new Function<String, Map<Integer, FileDescriptor>>() { @Override public Map<Integer, FileDescriptor> apply(String s) { return new HashMap<>(); } }); if (descriptorsByExtensionAndNumber.get(extensionName).containsKey(extensionNumber)) { throw new IllegalStateException( "Extension name " + extensionName + " and number " + extensionNumber + " are already defined"); } descriptorsByExtensionAndNumber.get(extensionName).put(extensionNumber, fd); } }
import collections

# a: number of lines to read; b: second value on the first line (unused below)
a, b = map(int, input().split())
c = [list(map(int, input().split())) for i in range(a)]

# Gather every element after the leading value of each line
result = []
for i in range(a):
    n = c[i][1:]
    result += n

# Count how many distinct values occur exactly a times in total
ans = collections.Counter(result)
ans1, ans2 = zip(*ans.most_common())
ans2 = list(ans2)
print(ans2.count(a))
<filename>blob_store/dav_blob_store/blob_store_verifier.go
package dav_blob_store

import (
    "fmt"
    "net/http"
    "net/url"

    config_package "github.com/cloudfoundry-incubator/ltc/config"
)

type Verifier struct{}

func (Verifier) Verify(config *config_package.Config) (authorized bool, err error) {
    blobStoreURL := url.URL{
        Scheme: "http",
        Host:   fmt.Sprintf("%s:%s", config.BlobStore().Host, config.BlobStore().Port),
        User:   url.UserPassword(config.BlobStore().Username, config.BlobStore().Password),
    }

    baseURL := &url.URL{
        Scheme: blobStoreURL.Scheme,
        Host:   blobStoreURL.Host,
        User:   blobStoreURL.User,
        Path:   "/blobs/",
    }

    req, err := http.NewRequest("PROPFIND", baseURL.String(), nil)
    if err != nil {
        return false, err
    }
    req.Header.Add("Depth", "1")

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        return false, err
    }
    defer resp.Body.Close()

    return resp.StatusCode == 207, err
}
// Copyright 2018 The Oppia Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Component for the classroom page. */ import { Component } from '@angular/core'; import { downgradeComponent } from '@angular/upgrade/static'; import { AppConstants } from 'app.constants'; import { ClassroomBackendApiService } from 'domain/classroom/classroom-backend-api.service'; import { ClassroomData } from 'domain/classroom/classroom-data.model'; import { UrlInterpolationService } from 'domain/utilities/url-interpolation.service'; import { CapitalizePipe } from 'filters/string-utility-filters/capitalize.pipe'; import { AccessValidationBackendApiService } from 'pages/oppia-root/routing/access-validation-backend-api.service'; import { AlertsService } from 'services/alerts.service'; import { UrlService } from 'services/contextual/url.service'; import { WindowRef } from 'services/contextual/window-ref.service'; import { LoaderService } from 'services/loader.service'; import { PageTitleService } from 'services/page-title.service'; import { SiteAnalyticsService } from 'services/site-analytics.service'; @Component({ selector: 'oppia-classroom-page', templateUrl: './classroom-page.component.html' }) export class ClassroomPageComponent { // These properties are initialized using Angular lifecycle hooks // and we need to do non-null assertion, for more information see // https://github.com/oppia/oppia/wiki/Guide-on-defining-types#ts-7-1 classroomDisplayName!: string; classroomUrlFragment!: string; bannerImageFileUrl!: string; classroomData!: ClassroomData; constructor( private accessValidationBackendApiService: AccessValidationBackendApiService, private alertsService: AlertsService, private capitalizePipe: CapitalizePipe, private classroomBackendApiService: ClassroomBackendApiService, private loaderService: LoaderService, private pageTitleService: PageTitleService, private siteAnalyticsService: SiteAnalyticsService, private urlInterpolationService: UrlInterpolationService, private urlService: UrlService, private windowRef: WindowRef ) {} ngOnInit(): void { this.classroomUrlFragment = ( this.urlService.getClassroomUrlFragmentFromUrl()); this.bannerImageFileUrl = this.urlInterpolationService.getStaticImageUrl( '/splash/books.svg'); this.loaderService.showLoadingScreen('Loading'); this.accessValidationBackendApiService.validateAccessToClassroomPage( this.classroomUrlFragment).then(() => { this.classroomBackendApiService.fetchClassroomDataAsync( this.classroomUrlFragment).then((classroomData) => { this.classroomData = classroomData; this.classroomDisplayName = this.capitalizePipe.transform( classroomData.getName()); this.pageTitleService.setPageTitle( `Learn ${this.classroomDisplayName} with Oppia | Oppia`); this.loaderService.hideLoadingScreen(); this.classroomBackendApiService.onInitializeTranslation.emit(); this.siteAnalyticsService.registerClassroomPageViewed(); }, (errorResponse) => { if (AppConstants.FATAL_ERROR_CODES.indexOf( errorResponse.status) !== -1) { 
this.alertsService.addWarning('Failed to get dashboard data'); } }); }, (err) => { // User-provided classroom does not exist. Redirect to default classroom. this.windowRef.nativeWindow.history.pushState( null, 'classroom', AppConstants.DEFAULT_CLASSROOM_URL_FRAGMENT); this.ngOnInit(); }); } getStaticImageUrl(imagePath: string): string { return this.urlInterpolationService.getStaticImageUrl(imagePath); } } angular.module('oppia').directive('oppiaClassroomPage', downgradeComponent({ component: ClassroomPageComponent }) as angular.IDirectiveFactory);
The Role of Vibrio cholerae Haemagglutinin Protease (HAP) in Extra-Intestinal Infection.
INTRODUCTION Based on the diversity of the surface O antigen, Vibrio cholerae can be classified into 206 serogroups. Vibrio cholerae is the causative agent of cholera and of extra-intestinal infections such as septicemia, wound infection and haemorrhagic reactions. The pathogenic factors of V. cholerae extra-intestinal infection are yet to be explored.
AIM To identify the pathogenic factor associated with V. cholerae extra-intestinal infection.
MATERIALS AND METHODS This study was carried out between April 2007 and October 2007 at the National Institute of Cholera and Enteric Diseases (NICED). Haemagglutinin Protease (HAP), a major secreted proteolytic enzyme, was purified from the culture supernatant of Vibrio cholerae O1 strain C6709 after removal of outer membrane vesicles, using single-step ion-exchange chromatography. The function of HAP was characterized using an animal model (subcutaneous mouse assay), basement membrane component degradation assays and tissue culture assays.
RESULT When suckling mice were subcutaneously injected with either the culture supernatant of strain C6709 or purified HAP, a distinct in vivo haemorrhagic response was found in both cases, along with histopathological changes: capillary necrosis, necrosis of the muscle layer, acute myofibre degeneration, and a moderate number of erythrocytes scattered through the skin. When Tryptic Soy Broth (TSB) medium was used, the haemorrhagic effects in suckling mice were not detectable. Laminin and collagen, the major protein components of the basement membrane of vascular endothelial cells, were degraded by HAP. Purified HAP showed cell-rounding effects on Int 407 cells.
CONCLUSION The results indicate that HAP may be a causative agent of Vibrio cholerae-mediated extra-intestinal infection. This study confirms that Vibrio cholerae as a sole pathogen can cause extra-intestinal infection. This information is important for public health notification. In addition, the results indicate that appropriate testing for Vibrio cholerae and intervention are important for patient management.
/**
 * Get the next AnalysisEngine that should receive the CAS.
 */
@Override
public Step next() throws AnalysisEngineProcessException {
  CAS cas = getCas();
  Iterator componentIter = mComponentInfo.iterator();
  while (componentIter.hasNext()) {
    ComponentInfo componentInfo = (ComponentInfo) componentIter.next();
    if (!mAlreadyCalled.contains(componentInfo.key)) {
      boolean satisfied = false;
      for (int i = 0; i < componentInfo.inputTypesByCapability.length; i++) {
        satisfied = casContainsTypes(cas, componentInfo.inputTypesByCapability[i]);
        if (satisfied)
          break;
      }
      if (satisfied) {
        mAlreadyCalled.add(componentInfo.key);
        if (mLogger.isLoggable(Level.FINEST)) {
          getContext().getLogger().log(Level.FINEST, "Next AE is: " + componentInfo.key);
        }
        return new SimpleStep(componentInfo.key);
      }
    }
  }
  getContext().getLogger().log(Level.FINEST, "Flow Complete.");
  return new FinalStep();
}
def era5_pos(lon, lat):
    # Reject coordinates outside the valid longitude/latitude range.
    if abs(lat) > 90 or abs(lon) > 180:
        raise ValueError('The given coordinates ({}, {}) '.format(lon, lat) +
                         'do not fit to the available data range.')
    # Shift longitude into the 0..360 range and snap both coordinates
    # to the nearest quarter degree (0.25 deg grid spacing).
    dx = 180 + lon
    dx = float(round(dx * 4) / 4)
    lat = float(round(lat * 4) / 4)
    return dx, lat
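A quick illustrative call (coordinates chosen for this example, not taken from the source) showing the quarter-degree snapping:

# Hypothetical example: roughly the coordinates of Paris (2.35 E, 48.86 N).
# Longitude: 180 + 2.35 = 182.35 -> 182.25 after quarter-degree rounding.
# Latitude: 48.86 -> 48.75.
print(era5_pos(2.35, 48.86))  # (182.25, 48.75)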
export declare function equals(one: any, other: any): boolean;
A Freeport man is accused of assaulting a state trooper during a traffic stop on the Southern State Parkway, and police are crediting two passing drivers for stopping and coming to the trooper's aid. State police say the suspect, Lenox Pascal, was pulled over just before 10 p.m. Friday near the Eagle Avenue exit in Hempstead for driving erratically. When the trooper attempted to arrest Pascal on DWI charges, police say Pascal resisted arrest and assaulted the trooper. Two passing drivers stopped to help, and Pascal was taken into custody. The trooper was treated for minor injuries and is expected to be OK. Police say Pascal had a suspended license for a prior DWI conviction. He's facing multiple charges, including felony assault on a police officer, aggravated driving while intoxicated, resisting arrest and reckless endangerment. In a police release, officials commended the motorists who stopped. "This trooper was engaged in a life-threatening situation on a heavily traveled parkway. These two good Samaritans jeopardized their own safety to come to the aid of the trooper," New York State Police Major David Candelaria, Troop L Commander, said in a statement.
<filename>src/clr_rom_src/main.cpp<gh_stars>0 // main.cpp // //MIT License //THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR //IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, //FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE //AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER //LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, //OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN //THE SOFTWARE. // // Oculus Rift : TM & Copyright Oculus VR, Inc. All Rights Reserved // Wizapply Framework : (C)Wizapply.com /* -- Include files -------------------------------------------- */ #include <ovrvision_pro.h> //Ovrvision SDK /* -- Macro definition ------------------------------------------------------- */ /* -- Global variable definition ----------------------------------------------- */ //Objects OVR::OvrvisionPro* g_pOvrvision; /* -- Function prototype ------------------------------------------------- */ /* -- Functions ------------------------------------------------------------- */ int main(int argc, const char **argv) { printf("OvrvisionPro : EEPROM user data elimination Tool\n"); //Create Ovrvision object g_pOvrvision = new OVR::OvrvisionPro(); if (g_pOvrvision->Open(0, OVR::OV_CAMVR_VGA) == 0) { //Open printf("Ovrvision Pro Open Error!\nPlease check whether OvrvisionPro is connected.\n"); return 0; } printf("The EEPROM data of OvrvisionPro is eliminated.\n"); g_pOvrvision->CameraParamResetEEPROM(); printf("Elimination was completed.\n"); //Wait getchar(); delete g_pOvrvision; return 0; } //EOF
Rush Limbaugh laughed about Japanese refugees recycling after the earthquake that struck the country on his Tuesday show. A caller asked Limbaugh, "If these are the people that invented the Prius, have mastered public transportation, recycling, why did Mother Earth, Gaia if you will, hit them with this disaster?" Limbaugh called this an "interesting question," and played a clip of ABC's Diane Sawyer reporting from a shelter in Japan. In the clip, Sawyer is surprised that the refugees in the shelter have maintained a recycling program. Limbaugh first mocked Sawyer, doing an impression of her and saying that "she sounds like she saw her husband for the first time in six months." He then turned to his caller's question. "He's right," Limbaugh said. "They've given us the Prius. Even now, refugees are recycling their garbage." Here, he began to laugh, continuing, "and yet, Gaia levels them! Just wipes them out!" This angle has been something of a theme for Limbaugh; last week, he wondered if environmentalists would "cheer" the quake. Watch (via Media Matters):
// Copyright (c) 2009-2010 <NAME> // Copyright (c) 2011 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file license.txt or http://www.opensource.org/licenses/mit-license.php. #ifndef RIPPLE_UINT256_H #define RIPPLE_UINT256_H #include <algorithm> #include <climits> #include <string> #include <vector> #include <cstdio> #include <cstring> #include <cassert> #include <boost/functional/hash.hpp> #include "types.h" #include "utils.h" #if defined(_MSC_VER) && _MSC_VER < 1300 #define for if (false) ; else for #endif // These classes all store their values internally // in big-endian form inline int Testuint256AdHoc(std::vector<std::string> vArg); // We have to keep a separate base class without constructors // so the compiler will let us use it in a union template<unsigned int BITS> class base_uint { protected: enum { WIDTH=BITS/32 }; // This is really big-endian in byte order. // We sometimes use unsigned int for speed. unsigned int pn[WIDTH]; public: bool isZero() const { for (int i = 0; i < WIDTH; i++) if (pn[i] != 0) return false; return true; } bool isNonZero() const { return !isZero(); } bool operator!() const { return isZero(); } const base_uint operator~() const { base_uint ret; for (int i = 0; i < WIDTH; i++) ret.pn[i] = ~pn[i]; return ret; } base_uint& operator=(uint64 uHost) { zero(); // Put in least significant bits. ((uint64*) end())[-1] = htobe64(uHost); return *this; } base_uint& operator^=(const base_uint& b) { for (int i = 0; i < WIDTH; i++) pn[i] ^= b.pn[i]; return *this; } base_uint& operator&=(const base_uint& b) { for (int i = 0; i < WIDTH; i++) pn[i] &= b.pn[i]; return *this; } base_uint& operator|=(const base_uint& b) { for (int i = 0; i < WIDTH; i++) pn[i] |= b.pn[i]; return *this; } base_uint& operator++() { // prefix operator for (int i = WIDTH - 1; i >= 0; --i) { pn[i] = htobe32(be32toh(pn[i]) + 1); if (pn[i] != 0) break; } return *this; } const base_uint operator++(int) { // postfix operator const base_uint ret = *this; ++(*this); return ret; } base_uint& operator--() { for (int i = WIDTH - 1; i >= 0; --i) { uint32 prev = pn[i]; pn[i] = htobe32(be32toh(pn[i]) - 1); if (prev != 0) break; } return *this; } const base_uint operator--(int) { // postfix operator const base_uint ret = *this; --(*this); return ret; } base_uint& operator+=(const base_uint& b) { uint64 carry = 0; for (int i = WIDTH; i--;) { uint64 n = carry + be32toh(pn[i]) + be32toh(b.pn[i]); pn[i] = htobe32(n & 0xffffffff); carry = n >> 32; } return *this; } std::size_t hash_combine(std::size_t& seed) const { for (int i = 0; i < WIDTH; ++i) boost::hash_combine(seed, pn[i]); return seed; } friend inline int compare(const base_uint& a, const base_uint& b) { const unsigned char* pA = a.begin(); const unsigned char* pAEnd = a.end(); const unsigned char* pB = b.begin(); while (*pA == *pB) { if (++pA == pAEnd) return 0; ++pB; } return (*pA < *pB) ? 
-1 : 1; } friend inline bool operator<(const base_uint& a, const base_uint& b) { return compare(a, b) < 0; } friend inline bool operator<=(const base_uint& a, const base_uint& b) { return compare(a, b) <= 0; } friend inline bool operator>(const base_uint& a, const base_uint& b) { return compare(a, b) > 0; } friend inline bool operator>=(const base_uint& a, const base_uint& b) { return compare(a, b) >= 0; } friend inline bool operator==(const base_uint& a, const base_uint& b) { return memcmp(a.pn, b.pn, sizeof(a.pn)) == 0; } friend inline bool operator!=(const base_uint& a, const base_uint& b) { return memcmp(a.pn, b.pn, sizeof(a.pn)) != 0; } std::string GetHex() const { return strHex(begin(), size()); } void SetHexExact(const char* psz) { // must be precisely the correct number of hex digits static signed char phexdigit[256] = { -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,-1,-1, -1,-1,-1,-1, -1,0xa,0xb,0xc, 0xd,0xe,0xf,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,0xa,0xb,0xc, 0xd,0xe,0xf,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, }; char* pOut = reinterpret_cast<char*>(pn); for (int i = 0; i < sizeof(pn); ++i) { *pOut = phexdigit[(unsigned char)*psz++] << 4; *pOut++ |= phexdigit[(unsigned char)*psz++]; } assert(*psz == 0); assert(pOut == reinterpret_cast<char*>(end())); } // Allow leading whitespace. // Allow leading "0x". // To be valid must be '\0' terminated. bool SetHex(const char* psz, bool bStrict=false) { // skip leading spaces if (!bStrict) while (isspace(*psz)) psz++; // skip 0x if (!bStrict && psz[0] == '0' && tolower(psz[1]) == 'x') psz += 2; // hex char to int static signed char phexdigit[256] = { -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,-1,-1, -1,-1,-1,-1, -1,0xa,0xb,0xc, 0xd,0xe,0xf,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,0xa,0xb,0xc, 0xd,0xe,0xf,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, }; const unsigned char* pEnd = reinterpret_cast<const unsigned char*>(psz); const unsigned char* pBegin = pEnd; // Find end. while (phexdigit[*pEnd] >= 0) pEnd++; // Take only last digits of over long string. 
if ((unsigned int)(pEnd-pBegin) > 2*size()) pBegin = pEnd - 2*size(); unsigned char* pOut = end()-((pEnd-pBegin+1)/2); zero(); if ((pEnd-pBegin) & 1) *pOut++ = phexdigit[*pBegin++]; while (pBegin != pEnd) { unsigned char cHigh = phexdigit[*pBegin++] << 4; unsigned char cLow = pBegin == pEnd ? 0 : phexdigit[*pBegin++]; *pOut++ = cHigh | cLow; } return !*pEnd; } bool SetHex(const std::string& str, bool bStrict=false) { return SetHex(str.c_str(), bStrict); } void SetHexExact(const std::string& str) { SetHexExact(str.c_str()); } std::string ToString() const { return GetHex(); } unsigned char* begin() { return reinterpret_cast<unsigned char*>(pn); } unsigned char* end() { return reinterpret_cast<unsigned char*>(pn + WIDTH); } const unsigned char* begin() const { return reinterpret_cast<const unsigned char*>(pn); } const unsigned char* end() const { return reinterpret_cast<const unsigned char*>(pn + WIDTH); } unsigned int size() const { return sizeof(pn); } void zero() { memset(&pn[0], 0, sizeof(pn)); } unsigned int GetSerializeSize(int nType=0) const { return sizeof(pn); } template<typename Stream> void Serialize(Stream& s, int nType=0) const { s.write((char*)pn, sizeof(pn)); } template<typename Stream> void Unserialize(Stream& s, int nType=0) { s.read((char*)pn, sizeof(pn)); } friend class uint128; friend class uint160; friend class uint256; friend inline int Testuint256AdHoc(std::vector<std::string> vArg); }; typedef base_uint<128> base_uint128; typedef base_uint<160> base_uint160; typedef base_uint<256> base_uint256; // // uint128, uint160, & uint256 could be implemented as templates, but to keep // compile errors and debugging cleaner, they're copy and pasted. // ////////////////////////////////////////////////////////////////////////////// // // uint128 // class uint128 : public base_uint128 { public: typedef base_uint128 basetype; uint128() { zero(); } uint128(const basetype& b) { *this = b; } uint128& operator=(const basetype& b) { for (int i = 0; i < WIDTH; i++) pn[i] = b.pn[i]; return *this; } explicit uint128(const base_uint256& b) { for (int i = 0; i < WIDTH; i++) pn[i] = b.pn[i]; } explicit uint128(const std::vector<unsigned char>& vch) { if (vch.size() == size()) memcpy(pn, &vch[0], size()); else zero(); } }; ////////////////////////////////////////////////////////////////////////////// // // uint256 // class uint256 : public base_uint256 { public: typedef base_uint256 basetype; uint256() { zero(); } uint256(const basetype& b) { *this = b; } uint256& operator=(const basetype& b) { if (pn != b.pn) memcpy(pn, b.pn, sizeof(pn)); return *this; } uint256(uint64 b) { *this = b; } uint256& operator=(uint64 uHost) { zero(); // Put in least significant bits. 
((uint64*) end())[-1] = htobe64(uHost); return *this; } explicit uint256(const std::string& str) { SetHex(str); } explicit uint256(const std::vector<unsigned char>& vch) { if (vch.size() == sizeof(pn)) memcpy(pn, &vch[0], sizeof(pn)); else { assert(false); zero(); } } }; inline bool operator==(const uint256& a, uint64 b) { return (base_uint256)a == b; } inline bool operator!=(const uint256& a, uint64 b) { return (base_uint256)a != b; } inline const uint256 operator^(const base_uint256& a, const base_uint256& b) { return uint256(a) ^= b; } inline const uint256 operator&(const base_uint256& a, const base_uint256& b) { return uint256(a) &= b; } inline const uint256 operator|(const base_uint256& a, const base_uint256& b) { return uint256(a) |= b; } inline bool operator==(const base_uint256& a, const uint256& b) { return (base_uint256)a == (base_uint256)b; } inline bool operator!=(const base_uint256& a, const uint256& b) { return (base_uint256)a != (base_uint256)b; } inline const uint256 operator^(const base_uint256& a, const uint256& b) { return (base_uint256)a ^ (base_uint256)b; } inline const uint256 operator&(const base_uint256& a, const uint256& b) { return (base_uint256)a & (base_uint256)b; } inline const uint256 operator|(const base_uint256& a, const uint256& b) { return (base_uint256)a | (base_uint256)b; } inline bool operator==(const uint256& a, const base_uint256& b) { return (base_uint256)a == (base_uint256)b; } inline bool operator!=(const uint256& a, const base_uint256& b) { return (base_uint256)a != (base_uint256)b; } inline const uint256 operator^(const uint256& a, const base_uint256& b) { return (base_uint256)a ^ (base_uint256)b; } inline const uint256 operator&(const uint256& a, const base_uint256& b) { return uint256(a) &= b; } inline const uint256 operator|(const uint256& a, const base_uint256& b) { return (base_uint256)a | (base_uint256)b; } inline bool operator==(const uint256& a, const uint256& b) { return (base_uint256)a == (base_uint256)b; } inline bool operator!=(const uint256& a, const uint256& b) { return (base_uint256)a != (base_uint256)b; } inline const uint256 operator^(const uint256& a, const uint256& b) { return (base_uint256)a ^ (base_uint256)b; } inline const uint256 operator&(const uint256& a, const uint256& b) { return (base_uint256)a & (base_uint256)b; } inline const uint256 operator|(const uint256& a, const uint256& b) { return (base_uint256)a | (base_uint256)b; } extern std::size_t hash_value(const uint256&); template<unsigned int BITS> inline std::ostream& operator<<(std::ostream& out, const base_uint<BITS>& u) { return out << u.GetHex(); } inline int Testuint256AdHoc(std::vector<std::string> vArg) { uint256 g(0); printf("%s\n", g.ToString().c_str()); --g; printf("--g\n"); printf("%s\n", g.ToString().c_str()); g--; printf("g--\n"); printf("%s\n", g.ToString().c_str()); g++; printf("g++\n"); printf("%s\n", g.ToString().c_str()); ++g; printf("++g\n"); printf("%s\n", g.ToString().c_str()); g++; printf("g++\n"); printf("%s\n", g.ToString().c_str()); ++g; printf("++g\n"); printf("%s\n", g.ToString().c_str()); uint256 a(7); printf("a=7\n"); printf("%s\n", a.ToString().c_str()); uint256 b; printf("b undefined\n"); printf("%s\n", b.ToString().c_str()); int c = 3; a = c; a.pn[3] = 15; printf("%s\n", a.ToString().c_str()); uint256 k(c); a = 5; a.pn[3] = 15; printf("%s\n", a.ToString().c_str()); b = 1; // b <<= 52; a |= b; // a ^= 0x500; printf("a %s\n", a.ToString().c_str()); a = a | b | (uint256)0x1000; printf("a %s\n", a.ToString().c_str()); printf("b %s\n", 
b.ToString().c_str()); a = 0xfffffffe; a.pn[4] = 9; printf("%s\n", a.ToString().c_str()); a++; printf("%s\n", a.ToString().c_str()); a++; printf("%s\n", a.ToString().c_str()); a++; printf("%s\n", a.ToString().c_str()); a++; printf("%s\n", a.ToString().c_str()); a--; printf("%s\n", a.ToString().c_str()); a--; printf("%s\n", a.ToString().c_str()); a--; printf("%s\n", a.ToString().c_str()); uint256 d = a--; printf("%s\n", d.ToString().c_str()); printf("%s\n", a.ToString().c_str()); a--; printf("%s\n", a.ToString().c_str()); a--; printf("%s\n", a.ToString().c_str()); d = a; printf("%s\n", d.ToString().c_str()); for (int i = uint256::WIDTH-1; i >= 0; i--) printf("%08x", d.pn[i]); printf("\n"); uint256 neg = d; neg = ~neg; printf("%s\n", neg.ToString().c_str()); uint256 e = uint256("0xABCDEF123abcdef12345678909832180000011111111"); printf("\n"); printf("%s\n", e.ToString().c_str()); printf("\n"); uint256 x1 = uint256("0xABCDEF123abcdef12345678909832180000011111111"); uint256 x2; printf("%s\n", x1.ToString().c_str()); for (int i = 0; i < 270; i += 4) { // x2 = x1 << i; printf("%s\n", x2.ToString().c_str()); } printf("\n"); printf("%s\n", x1.ToString().c_str()); for (int i = 0; i < 270; i += 4) { x2 = x1; // x2 >>= i; printf("%s\n", x2.ToString().c_str()); } #if 0 for (int i = 0; i < 100; i++) { uint256 k = (~uint256(0) >> i); printf("%s\n", k.ToString().c_str()); } for (int i = 0; i < 100; i++) { uint256 k = (~uint256(0) << i); printf("%s\n", k.ToString().c_str()); } #endif return (0); } ////////////////////////////////////////////////////////////////////////////// // // uint160 // class uint160 : public base_uint160 { public: typedef base_uint160 basetype; uint160() { zero(); } uint160(const basetype& b) { *this = b; } uint160& operator=(const basetype& b) { for (int i = 0; i < WIDTH; i++) pn[i] = b.pn[i]; return *this; } uint160(uint64 b) { *this = b; } uint160& operator=(uint64 uHost) { zero(); // Put in least significant bits. 
((uint64*) end())[-1] = htobe64(uHost); return *this; } explicit uint160(const std::string& str) { SetHex(str); } explicit uint160(const std::vector<unsigned char>& vch) { if (vch.size() == sizeof(pn)) memcpy(pn, &vch[0], sizeof(pn)); else zero(); } base_uint256 to256() const { uint256 m; memcpy(m.begin(), begin(), size()); return m; } }; inline bool operator==(const uint160& a, uint64 b) { return (base_uint160)a == b; } inline bool operator!=(const uint160& a, uint64 b) { return (base_uint160)a != b; } inline const uint160 operator^(const base_uint160& a, const base_uint160& b) { return uint160(a) ^= b; } inline const uint160 operator&(const base_uint160& a, const base_uint160& b) { return uint160(a) &= b; } inline const uint160 operator|(const base_uint160& a, const base_uint160& b) { return uint160(a) |= b; } inline bool operator==(const base_uint160& a, const uint160& b) { return (base_uint160)a == (base_uint160)b; } inline bool operator!=(const base_uint160& a, const uint160& b) { return (base_uint160)a != (base_uint160)b; } inline const uint160 operator^(const base_uint160& a, const uint160& b) { return (base_uint160)a ^ (base_uint160)b; } inline const uint160 operator&(const base_uint160& a, const uint160& b) { return (base_uint160)a & (base_uint160)b; } inline const uint160 operator|(const base_uint160& a, const uint160& b) { return (base_uint160)a | (base_uint160)b; } inline bool operator==(const uint160& a, const base_uint160& b) { return (base_uint160)a == (base_uint160)b; } inline bool operator!=(const uint160& a, const base_uint160& b) { return (base_uint160)a != (base_uint160)b; } inline const uint160 operator^(const uint160& a, const base_uint160& b) { return (base_uint160)a ^ (base_uint160)b; } inline const uint160 operator&(const uint160& a, const base_uint160& b) { return (base_uint160)a & (base_uint160)b; } inline const uint160 operator|(const uint160& a, const base_uint160& b) { return (base_uint160)a | (base_uint160)b; } inline bool operator==(const uint160& a, const uint160& b) { return (base_uint160)a == (base_uint160)b; } inline bool operator!=(const uint160& a, const uint160& b) { return (base_uint160)a != (base_uint160)b; } inline const uint160 operator^(const uint160& a, const uint160& b) { return (base_uint160)a ^ (base_uint160)b; } inline const uint160 operator&(const uint160& a, const uint160& b) { return (base_uint160)a & (base_uint160)b; } inline const uint160 operator|(const uint160& a, const uint160& b) { return (base_uint160)a | (base_uint160)b; } extern std::size_t hash_value(const uint160&); inline const std::string strHex(const uint160& ui) { return strHex(ui.begin(), ui.size()); } #endif // vim:ts=4
import java.util.Scanner; import java.io.PrintWriter; import java.util.*; import java.lang.Math; public class abb{ private static Scanner sc = new Scanner(System.in); public static void p(long n,long k){ //long totali=n*k*2; //long l=(int)Math.sqrt(n*k*2); //long hi=n*k*2; //while(l<hi){ // long mid=(l+hi)/2; // if(mid*mid<totali) l=mid+1; // else hi=mid; //} long d; if(n>k){ d=k*2; long z=Math.max(d,n); System.out.println(z*z); } else{ d=n*2; long z=Math.max(d,k); System.out.println(z*z); } } public static void main(String[] args) { //int numOfBlocks = sc.nextInt(); int c=sc.nextInt(); while(c-->0){ int a=sc.nextInt(); int b=sc.nextInt(); p(a,b); } } }
class InlineQuery:
    '''id: Unique identifier for this query'''
    id: str
    '''from_user: Sender'''
    from_user: User
    '''location: Sender location, only for bots that request user location'''
    location: Optional[Location]
    '''query: Text of the query (up to 256 characters)'''
    query: str
    '''offset: Offset of the results to be returned, can be controlled by the bot'''
    offset: str
/** * @class Lunch * @docs https://bell.harker.org/docs/api.html#get-lunchmenu */ export class Lunch { /** * Date of data * @type {!string} * @memberof Lunch */ date!: string /** * Lunch of the day * @type {!{place: string, food: string}[]} * @memberof Lunch */ lunch!: { place: string, food: string }[] /** * Get all open food places of the day * @type {void} * @returns {string[]} * @memberof Lunch */ getPlaces(): string[] { return this.lunch.map((item) => item.place) } /** * Get all food of the day * @type {void} * @returns {string[]} * @memberof Lunch */ getFoods(): string[] { return this.lunch.map((item) => item.food) } } /** * @class Schedule * @docs https://bell.harker.org/docs/api.html#get-schedule */ export class Schedule { /** * Date of data * @type {!string} * @memberof Schedule */ date!: string /** * Letter code of schedule * @type {!string} * @memberof Schedule */ code!: string /** * Indication of special schedules * @type {?string} * @memberof Schedule */ variant?: string /** * Name of modified schedule * @type {?string} * @memberof Schedule */ name?: string /** * Schedule of the day * @type {!{start: string, end: string, name: string}[]} * @memberof Schedule */ schedule!: { start: string, end: string, name: string }[] } /** * @class Events * @docs https://bell.harker.org/docs/api.html#get-events */ export class Events { /** * Date of data * @type {!string} * @memberof Events */ date!: string /** * Events of the day * @type {!{name: string, start: string, end: string, category: "schoolwide" | "academics" | "important" | "athspirit" | "extra" | "perfarts" | "clubs" | "special" | "info" | "other"}[]} * @memberof Events */ events!: { name: string, start: string, end: string, category: "schoolwide" | "academics" | "important" | "athspirit" | "extra" | "perfarts" | "clubs" | "special" | "info" | "other" }[] } /** * @interface lunchCallback */ export interface lunchCallback { (res: Lunch): void } /** * @interface scheduleCallback */ export interface scheduleCallback { (res: Schedule): void } /** * @interface eventsCallback */ export interface eventsCallback { (res: Events): void }
Tony Ortega was questioned by one of his followers on my exposure of Tony’s sleazy, dishonest reporting the other day on My Scientology Movie. The inquirer purports to be a full-fledged bunkeroo, and law school graduate. She also regularly trolls my blog, slavishly pandering to Ortega. Here is the thread: Chee Chalker • 3 days ago Tony, Marty claims that you misquoted him. Or maybe it’s that you didn’t quote him enough. You only printed 1/20th of his response. Your post says you asked him about his recent posts. His post said his response to you “cognitive dissonance and paranoia”) was in response to another question of yours. Maybe I am a glutton for punishment, but is his entire response worth posting? Tony Ortega Mod Chee Chalker • 3 days ago Sorry. Not going to be baited into publishing material I can’t make public. I made public what I can at this point. Tony Ortega Mod Tony Ortega • 3 days ago And no, he was not misquoted. — I never said Ortega “misquoted me”. I actually wrote the following in response to a commenter on my blog who suggested that I am responsible for Ortega’s sickness by not answering his questions: “Ortega never asked me about my review. He published 1/20 of a response to an entirely different question. Clearly, he owns you.” Ortega was in fact challenging me on my use of the term “ASC”, while hurling abusive accusations at me. In either event, Tony goes on owning Chee Chalker and the rest of his sheep with claims of insider knowledge that justifies/validates his ongoing innuendo campaign: “Sorry. Not going to be baited into publishing material I can’t make public. I made public what I can at this point.” That is how he kept his faithful drinking his Kool Aid during his four-month campaign against my family. He claims insider knowledge that he is afraid of the consequences of sharing. Another part of my answer referred to above that he only published 1/20th (as you’ll see, it might have been more like 1/100th) of out of context, confronted Ortega with that very sleaze tactic he is so fond of. To wit, “If inventions of this kind are required for you to understand my posts and the use of the term ASC, your mind is far more infected with cognitive dissonance and paranoia than any scientologist I have ever encountered. You are also a coward and liar. You represented to your readers on several occasions in writing that you know what actually went on [with Monique’s lawsuit], but couldn’t disclose it at that time. That was stated in response to several people speculating as to milder motives on our part than you projected. You used definitive statements as to your insider, undisclosed knowledge to forward your hate against us. I knew you were lying then. You have now confirmed that fact in writing below.” The “inventions of this kind” referred to had nothing to do with My Scientology Movie or anything Ortega claimed it was in response to. Some of the published comments by Ortega during his anti-Rathbun campaign that I was referring to here were: “I know more than I can say.” and “I know what happened. What you propose isn’t it.” and “I have some details that I can’t talk about yet. I will when I can.” It has been nearly five months since he implied that he would put up or shut up – and as per usual, he instead lied and has done neither. He continues perpetuating the fraud with more of the same dissembling conduct. There is no difference whatsoever in the tactics Tony Ortega regularly uses to warp his followers’ minds than what he accuses the object of his obsession with. 
Kettle, meet pot.
// ClearTag finishes handling a tag.
func (cs *connState) ClearTag(t Tag) {
	cs.tagMu.Lock()
	defer cs.tagMu.Unlock()

	ch, ok := cs.tags[t]
	if !ok {
		panic("unused tag cleared")
	}

	delete(cs.tags, t)
	close(ch)
}
/**
 * @author Rinat Gareev
 */
public class ListWhitespaceChars {

    public static void main(String args[]) {
        System.out.println("Code\tWhite\tCategory");
        for (int i = 0x0000; i <= 0xFFFF; i++) {
            if (Character.isWhitespace(i) || isControl(i) || isSep(i)) {
                System.out.println(format(i));
            }
        }
    }

    private static String getCharType(int ch) {
        int chType = Character.getType(ch);
        switch (chType) {
        case Character.SPACE_SEPARATOR:
            return "SPACE_SEPARATOR";
        case Character.LINE_SEPARATOR:
            return "LINE_SEPARATOR";
        case Character.PARAGRAPH_SEPARATOR:
            return "PARAGRAPH_SEPARATOR";
        default:
            return String.valueOf(chType);
        }
    }

    private static boolean isControl(int ch) {
        return Character.CONTROL == Character.getType(ch);
    }

    private static boolean isSep(int ch) {
        switch (Character.getType(ch)) {
        case Character.SPACE_SEPARATOR:
        case Character.LINE_SEPARATOR:
        case Character.PARAGRAPH_SEPARATOR:
            return true;
        default:
            return false;
        }
    }

    private static String format(int codePoint) {
        return String.format("U%04x\t%s\t%s", codePoint,
                Character.isWhitespace(codePoint), getCharType(codePoint));
    }
}
/*
 *******************************
 -----------------------------
 | copyrights with l0gic_b0mb |
 -----------------------------
 *******************************
*/
#include<bits/stdc++.h>
#define MOD 1000000007
#define llint long long int
#define max(a,b) (a>=b?a:b)
#define min(a,b) (a<=b?a:b)
using namespace std;

vector<int> prefix;
int cumu_idx[100001];

int give_idx(int length)
{
    if(length==0) {
        return 0;
    }
    if(cumu_idx[length]!=-1) {
        return cumu_idx[length];
    }
    return cumu_idx[length]=give_idx(prefix[length-1]);
}

void process(string& s, vector<int>& v)
{
    v.resize(s.size());
    memset(cumu_idx,-1,sizeof cumu_idx);
    for(int i=0; i<v.size(); i++) {
        v[i]=0;
    }
    for(int i=1,j=0; i<s.size(); ) {
        if(s[i]==s[j]) {
            j++;
            v[i]=j;
            i++;
        } else {
            if(j==0) {
                v[i]=0;
                i++;
            } else {
                j = v[j-1];
            }
        }
    }
    return;
}

int main()
{
    string s;
    cin>>s;
    process(s, prefix);

    vector<int> valid_length;
    int idx = s.size()-1;
    while(prefix[idx]>0) {
        valid_length.push_back(prefix[idx]);
        idx = prefix[idx]-1;
    }
    reverse(valid_length.begin(), valid_length.end());

    map<int,int> id;
    for(int i=0; i<valid_length.size(); i++) {
        // cout<<valid_length[i]<<" ";
        cumu_idx[valid_length[i]] = i+1;
        id[valid_length[i]] = i;
    }
    // cout<<endl;

    vector<int> cumulative(valid_length.size()+1, 0);
    for(int i=1; i<s.size(); i++) {
        cumulative[0]++;
        cumulative[give_idx(prefix[i])]--;
    }

    vector< pair<int,int> > ans;
    for(int i=0; i<valid_length.size(); i++) {
        if(i-1>=0)
            cumulative[i] += cumulative[i-1];
        ans.push_back({valid_length[i], cumulative[i]});
    }
    ans.push_back({s.size(), 0});

    cout<<ans.size()<<endl;
    for(int i=0; i<ans.size(); i++) {
        cout<<ans[i].first<<" "<<ans[i].second+1<<endl;
    }
    return 0;
}

/*
  Hidden within the kernel is a l0gic_b0mb;
  malicious code designed to execute under circumstances I've programmed.
*/
def intervals_in_genomic_sort_order(interval_strings):
    def sort_key(interval_string):
        chrom, positions = interval_string.split(":")
        if chrom in CHROMOSOME_ORDER:
            chrom_ordinal = CHROMOSOME_ORDER.index(chrom)
        else:
            chrom_ordinal = sum(ord(c) * 10 ** (len(chrom) - i)
                                for i, c in enumerate(chrom))
        start_pos, end_pos = positions.split("-")
        return chrom_ordinal, int(start_pos), int(end_pos)

    return sorted(interval_strings, key=sort_key)
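The function relies on a module-level CHROMOSOME_ORDER list that is not shown in this snippet. A minimal usage sketch, with a hypothetical human-chromosome ordering standing in for the real constant:

# Hypothetical stand-in for the module's CHROMOSOME_ORDER constant.
CHROMOSOME_ORDER = [str(i) for i in range(1, 23)] + ["X", "Y", "MT"]

intervals = ["X:500-900", "2:300-400", "2:100-250", "10:1-50"]
print(intervals_in_genomic_sort_order(intervals))
# Expected genomic order: ['2:100-250', '2:300-400', '10:1-50', 'X:500-900']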
import { ASTv2, generateSyntaxError, SourceSlice, SourceSpan } from '@glimmer/syntax'; import { expect } from '@glimmer/util'; import { Err, Ok, Result } from '../../../shared/result'; import * as mir from '../../2-encoding/mir'; import { NormalizationState } from '../context'; import { VISIT_EXPRS } from '../visitors/expressions'; import { assertValidHasBlockUsage } from './has-block'; import { keywords } from './impl'; export const APPEND_KEYWORDS = keywords('Append') .kw('yield', { assert( node: ASTv2.AppendContent ): Result<{ target: SourceSlice; positional: ASTv2.PositionalArguments; }> { let { args } = node; if (args.named.isEmpty()) { return Ok({ target: SourceSpan.synthetic('default').toSlice(), positional: args.positional, }); } else { let target = args.named.get('to'); if (args.named.size > 1 || target === null) { return Err( generateSyntaxError(`yield only takes a single named argument: 'to'`, args.named.loc) ); } if (ASTv2.isLiteral(target, 'string')) { return Ok({ target: target.toSlice(), positional: args.positional }); } else { return Err( generateSyntaxError(`you can only yield to a literal string value`, target.loc) ); } } }, translate( { node, state }: { node: ASTv2.AppendContent; state: NormalizationState }, { target, positional, }: { target: SourceSlice; positional: ASTv2.PositionalArguments; } ): Result<mir.Statement> { return VISIT_EXPRS.Positional(positional, state).mapOk( (positional) => new mir.Yield({ loc: node.loc, target, to: state.scope.allocateBlock(target.chars), positional, }) ); }, }) .kw('partial', { assert(node: ASTv2.AppendContent): Result<ASTv2.ExpressionNode | undefined> { let { args: { positional, named }, } = node; let { trusting } = node; if (positional.isEmpty()) { return Err( generateSyntaxError( `Partial found with no arguments. You must specify a template name`, node.loc ) ); } else if (positional.size !== 1) { return Err( generateSyntaxError( `Partial found with ${positional.exprs.length} arguments. You must specify a template name`, node.loc ) ); } if (named.isEmpty()) { if (trusting) { return Err( generateSyntaxError( `{{{partial ...}}} is not supported, please use {{partial ...}} instea`, node.loc ) ); } return Ok(expect(positional.nth(0), `already confirmed that positional has a 0th entry`)); } else { return Err(generateSyntaxError(`Partial does not take any named argument`, node.loc)); } }, translate( { node, state }: { node: ASTv2.AppendContent; state: NormalizationState }, expr: ASTv2.ExpressionNode | undefined ): Result<mir.Statement> { state.scope.setHasEval(); let visited = expr === undefined ? 
Ok( new ASTv2.LiteralExpression({ loc: SourceSpan.synthetic('undefined'), value: undefined, }) ) : VISIT_EXPRS.visit(expr, state); return visited.mapOk( (target) => new mir.Partial({ loc: node.loc, scope: state.scope, target }) ); }, }) .kw('debugger', { assert(node: ASTv2.AppendContent): Result<void> { let { args } = node; let { positional } = args; if (args.isEmpty()) { return Ok(undefined); } else { if (positional.isEmpty()) { return Err(generateSyntaxError(`debugger does not take any named arguments`, node.loc)); } else { return Err( generateSyntaxError(`debugger does not take any positional arguments`, node.loc) ); } } }, translate({ node, state: { scope }, }: { node: ASTv2.AppendContent; state: NormalizationState; }): Result<mir.Statement> { scope.setHasEval(); return Ok(new mir.Debugger({ loc: node.loc, scope })); }, }) .kw('has-block', { assert(node: ASTv2.AppendContent): Result<SourceSlice> { return assertValidHasBlockUsage('has-block', node); }, translate( { node, state: { scope } }: { node: ASTv2.AppendContent; state: NormalizationState }, target: SourceSlice ): Result<mir.AppendTextNode> { let text = new mir.HasBlock({ loc: node.loc, target, symbol: scope.allocateBlock(target.chars), }); return Ok(new mir.AppendTextNode({ loc: node.loc, text })); }, }) .kw('has-block-params', { assert(node: ASTv2.AppendContent): Result<SourceSlice> { return assertValidHasBlockUsage('has-block-params', node); }, translate( { node, state: { scope } }: { node: ASTv2.AppendContent; state: NormalizationState }, target: SourceSlice ): Result<mir.AppendTextNode> { let text = new mir.HasBlockParams({ loc: node.loc, target, symbol: scope.allocateBlock(target.chars), }); return Ok(new mir.AppendTextNode({ loc: node.loc, text })); }, }) .kw('component', { assert( node: ASTv2.AppendContent ): Result<{ args: ASTv2.Args; }> { let { args } = node; let definition = args.nth(0); if (definition === null) { return Err( generateSyntaxError( `{{component}} requires a component definition or identifier as its first positional parameter, did not receive any parameters.`, args.loc ) ); } args = new ASTv2.Args({ positional: new ASTv2.PositionalArguments({ exprs: args.positional.exprs.slice(1), loc: args.positional.loc, }), named: args.named, loc: args.loc, }); return Ok({ definition, args }); }, translate( { node, state }: { node: ASTv2.AppendContent; state: NormalizationState }, { definition, args }: { definition: ASTv2.ExpressionNode; args: ASTv2.Args } ): Result<mir.InvokeComponent> { let definitionResult = VISIT_EXPRS.visit(definition, state); let argsResult = VISIT_EXPRS.Args(args, state); return Result.all(definitionResult, argsResult).mapOk( ([definition, args]) => new mir.InvokeComponent({ loc: node.loc, definition, args, blocks: null, }) ); }, });
PHILADELPHIA (CBS) — Police are investigating the deadly shooting of a woman in North Philadelphia. The shooting took place at about 9:15 a.m. Tuesday at 12th and Jefferson Streets, only blocks from the campus of Temple University.

The family of 56-year-old Kim Jones is looking for answers after a woman described as hard-working and well-educated was murdered in broad daylight. “She was just a good person,” said family member Deitra Jubilee. “It’s just baffling when somebody is trying to go to work, then never sees it coming. She has no enemies.”

Police say Jones was waiting for a SEPTA bus to take her to work. She was standing at the corner of 12th and Jefferson. Investigators say the gunman walked up and shot her once in the back of the head. Jones died at the scene, just a few steps away from home.

“We knew she was targeted for whatever reason. We do not believe it was a robbery,” said Philadelphia Police Captain James Clark. “Someone knew her routine. Somebody knew every morning she got on that bus to go to work and ambushed her. Who?”

“She had on headphones, she had a purse, she had jewelry, she had a cell phone, none of which was disturbed or taken,” said Clark.

Jones was an employee at Turning Points for Children, where she was a program director. Her colleagues say she was an outstanding child advocate. “I don’t know how we’re going to go forward. We’ve never had to deal with anything like this,” CEO Michael Vogel told Eyewitness News. Vogel said he personally recruited and hired Jones almost a decade ago.

According to investigators, the suspect is described as a heavy-set black male dressed in all black and carrying a duffel bag. He was last seen heading westbound on Jefferson Street. The shooter remains on the loose. Anyone with information on the suspect is urged to call 911 or the Philadelphia Homicide Unit at 215-686-3334 or 3335. The deadly shooting remains under investigation.

CBS 3’s Steve Patterson and David Spunt and KYW Newsradio’s John McDevitt contributed to this report.
/// \brief A plugin to transport a model from point to point using
/// pose animation.
class ModelMove : public ModelPlugin
{
  public: ModelMove();

  private: void Move(const math::Vector3 &_start, const math::Vector3 &_end,
                     math::Vector3 &_translation);

  private: bool LoadGoalsFromSDF(const sdf::ElementPtr _sdf);

  public: void InitiateMove();

  public: void OnPathMsg(ConstPoseAnimationPtr &_msg);

  public: void Load(physics::ModelPtr _parent, sdf::ElementPtr _sdf);

  private: physics::ModelPtr model;

  private: gazebo::common::PoseAnimationPtr anim;

  private: transport::NodePtr node;

  private: transport::SubscriberPtr pathSubscriber;

  private: math::Vector3 startPosition;

  private: std::vector<math::Pose> pathGoals;
};
// report is used to deliver the collected stats to the configured host.
func (m *Monit) report() {
	m.getStat()

	// Errors from Marshal and Post are ignored; a failed report is simply dropped.
	stat, _ := json.Marshal(m.config.Base)
	buf := bytes.NewBuffer(stat)
	r, _ := client.Post(m.config.Host, "application/json", buf)
	if r != nil {
		defer r.Body.Close()
	}
}
#ifndef __RECTANGLE_TEXT_H__
#define __RECTANGLE_TEXT_H__

#include <SDL2/SDL.h>
#include <string>

#include "drawable.h"
#include "font.h"
#include "paths.h"
#include "texture.h"

#define DEFAULT_PADDING_RECTANGLE_TEXT 2

namespace View {

class RectangleText: public Drawable {
private:
    Texture text;
    Font font;
    SDL_Color textColor;
    SDL_Color backgroundColor;
    int padding;
    bool hide;

public:
    RectangleText(
        int h,
        int padding = DEFAULT_PADDING_RECTANGLE_TEXT,
        std::string path = gPath.PATH_FONT_ARIAL_BOLD
    );
    ~RectangleText();

    void setText(SDL_Renderer *, const std::string &);
    void setTextColor(SDL_Color &);
    void setBackgroundColor(SDL_Color &);
    void toggleHide(bool);

    virtual int getWidth(void) const;
    virtual int getHeight(void) const;
    virtual int getX(void) const;
    virtual int getY(void) const;
    virtual void setX(int);
    virtual void setY(int);
    virtual void render(SDL_Renderer *, int, int);
};

}

#endif
package models

import (
	"github.com/satori/go.uuid"

	"github.com/Jsharkc/TechTree/backend/general"
	"github.com/Jsharkc/TechTree/backend/tidb"
)

type UserAddedServiceProvider struct {
}

var UserAddedService *UserAddedServiceProvider = &UserAddedServiceProvider{}

type UserAdded struct {
	ID          string `json:"id" gorm:"column:id"`
	NID         string `json:"nid" gorm:"column:nid" valid:"Required"`
	UID         string `json:"uid" gorm:"column:uid"`
	Description string `json:"desci" gorm:"column:desci" valid:"Required"`
	Status      int    `json:"status" gorm:"column:status"`
	Type        int    `json:"type" gorm:"column:thetype" valid:"Required"`
}

type UpdateUserAdded struct {
	ID     string `json:"id" gorm:"column:id" valid:"Required"`
	Status int    `json:"status" gorm:"column:status"`
}

func (ua UserAdded) TableName() string {
	return "useradded"
}

func (uas *UserAddedServiceProvider) UserAdded(u *UserAdded) error {
	u.ID = uuid.NewV4().String()
	u.Status = general.Initial

	return tidb.Conn.Model(&UserAdded{}).Create(u).Error
}

func (uas *UserAddedServiceProvider) Update(id *string, status int) error {
	return tidb.Conn.Model(&UserAdded{}).Where("id = ?", *id).Update("status", status).Error
}

func (uas *UserAddedServiceProvider) ListQues() ([]UserAdded, error) {
	var ua []UserAdded

	err := tidb.Conn.Model(&UserAdded{Type: general.AddedTypeQues}).Find(&ua).Error

	return ua, err
}

func (uas *UserAddedServiceProvider) ListKnow() ([]UserAdded, error) {
	var ua []UserAdded

	err := tidb.Conn.Model(&UserAdded{Type: general.AddedTypeKnow}).Find(&ua).Error

	return ua, err
}
<reponame>HaoZeke/math #ifndef STAN_MATH_REV_FUN_CHOLESKY_DECOMPOSE_HPP #define STAN_MATH_REV_FUN_CHOLESKY_DECOMPOSE_HPP #include <stan/math/rev/meta.hpp> #include <stan/math/rev/core.hpp> #include <stan/math/rev/fun/value_of_rec.hpp> #include <stan/math/rev/fun/value_of.hpp> #include <stan/math/prim/err.hpp> #include <stan/math/prim/fun/cholesky_decompose.hpp> #include <stan/math/prim/fun/Eigen.hpp> #include <stan/math/prim/fun/typedefs.hpp> #include <stan/math/prim/fun/value_of_rec.hpp> #ifdef STAN_OPENCL #include <stan/math/opencl/rev/opencl.hpp> #endif #include <algorithm> #include <vector> namespace stan { namespace math { namespace internal { /** * Set the lower right triangular of a var matrix given a set of vari** * * @param L Matrix of vars * @param vari_ref Values to be set in lower right triangular of L. * @return None, L modified by reference. */ inline void set_lower_tri_coeff_ref(Eigen::Matrix<var, -1, -1>& L, vari** vari_ref) { size_t pos = 0; vari* dummy = new vari(0.0, false); for (size_type j = 0; j < L.cols(); ++j) { for (size_type i = j; i < L.cols(); ++i) { L.coeffRef(i, j).vi_ = vari_ref[pos++]; } for (size_type k = 0; k < j; ++k) { L.coeffRef(k, j).vi_ = dummy; } } return; } } // namespace internal class cholesky_block : public vari { public: int M_; int block_size_; using Block_ = Eigen::Block<Eigen::MatrixXd>; vari** vari_ref_A_; vari** vari_ref_L_; /** * Constructor for Cholesky function. * * Stores varis for A. Instantiates and stores varis for L. * Instantiates and stores dummy vari for upper triangular part of var * result returned in cholesky_decompose function call * * variRefL aren't on the chainable autodiff stack, only used for storage * and computation. Note that varis for L are constructed externally in * cholesky_decompose. * * block_size_ determined using the same calculation Eigen/LLT.h * * @param A matrix * @param L_A matrix, Cholesky factor of A */ cholesky_block(const Eigen::Matrix<var, -1, -1>& A, const Eigen::Matrix<double, -1, -1>& L_A) : vari(0.0), M_(A.rows()), vari_ref_A_(ChainableStack::instance_->memalloc_.alloc_array<vari*>( A.rows() * (A.rows() + 1) / 2)), vari_ref_L_(ChainableStack::instance_->memalloc_.alloc_array<vari*>( A.rows() * (A.rows() + 1) / 2)) { size_t pos = 0; block_size_ = std::max(M_ / 8, 8); block_size_ = std::min(block_size_, 128); for (size_type j = 0; j < M_; ++j) { for (size_type i = j; i < M_; ++i) { vari_ref_A_[pos] = A.coeffRef(i, j).vi_; vari_ref_L_[pos] = new vari(L_A.coeffRef(i, j), false); ++pos; } } } /** * Symbolic adjoint calculation for Cholesky factor A * * @param L Cholesky factor * @param L_adj matrix of adjoints of L */ inline void symbolic_rev(Block_& L, Block_& L_adj) { using Eigen::Lower; using Eigen::StrictlyUpper; using Eigen::Upper; L.transposeInPlace(); L_adj = (L * L_adj.triangularView<Lower>()).eval(); L_adj.triangularView<StrictlyUpper>() = L_adj.adjoint().triangularView<StrictlyUpper>(); L.triangularView<Upper>().solveInPlace(L_adj); L.triangularView<Upper>().solveInPlace(L_adj.transpose()); } /** * Reverse mode differentiation algorithm reference: * * <NAME>: Differentiation of the Cholesky decomposition, 2016. 
* */ virtual void chain() { using Eigen::Block; using Eigen::Lower; using Eigen::MatrixXd; using Eigen::StrictlyUpper; using Eigen::Upper; auto L_adj = Eigen::MatrixXd::Zero(M_, M_).eval(); auto L = Eigen::MatrixXd::Zero(M_, M_).eval(); size_t pos = 0; for (size_type j = 0; j < M_; ++j) { for (size_type i = j; i < M_; ++i) { L_adj.coeffRef(i, j) = vari_ref_L_[pos]->adj_; L.coeffRef(i, j) = vari_ref_L_[pos]->val_; ++pos; } } for (int k = M_; k > 0; k -= block_size_) { int j = std::max(0, k - block_size_); Block_ R = L.block(j, 0, k - j, j); Block_ D = L.block(j, j, k - j, k - j); Block_ B = L.block(k, 0, M_ - k, j); Block_ C = L.block(k, j, M_ - k, k - j); Block_ R_adj = L_adj.block(j, 0, k - j, j); Block_ D_adj = L_adj.block(j, j, k - j, k - j); Block_ B_adj = L_adj.block(k, 0, M_ - k, j); Block_ C_adj = L_adj.block(k, j, M_ - k, k - j); if (C_adj.size() > 0) { C_adj = D.transpose() .triangularView<Upper>() .solve(C_adj.transpose()) .transpose(); B_adj.noalias() -= C_adj * R; D_adj.noalias() -= C_adj.transpose() * C; } symbolic_rev(D, D_adj); R_adj.noalias() -= C_adj.transpose() * B; R_adj.noalias() -= D_adj.selfadjointView<Lower>() * R; D_adj.diagonal() *= 0.5; D_adj.triangularView<StrictlyUpper>().setZero(); } pos = 0; for (size_type j = 0; j < M_; ++j) { for (size_type i = j; i < M_; ++i) { vari_ref_A_[pos++]->adj_ += L_adj.coeffRef(i, j); } } } }; class cholesky_scalar : public vari { public: int M_; vari** vari_ref_A_; vari** vari_ref_L_; /** * Constructor for Cholesky function. * * Stores varis for A. Instantiates and stores varis for L. Instantiates * and stores dummy vari for upper triangular part of var result returned * in cholesky_decompose function call * * variRefL aren't on the chainable autodiff stack, only used for storage * and computation. Note that varis for L are constructed externally in * cholesky_decompose. * * @param A matrix * @param L_A matrix, Cholesky factor of A */ cholesky_scalar(const Eigen::Matrix<var, -1, -1>& A, const Eigen::Matrix<double, -1, -1>& L_A) : vari(0.0), M_(A.rows()), vari_ref_A_(ChainableStack::instance_->memalloc_.alloc_array<vari*>( A.rows() * (A.rows() + 1) / 2)), vari_ref_L_(ChainableStack::instance_->memalloc_.alloc_array<vari*>( A.rows() * (A.rows() + 1) / 2)) { size_t accum = 0; size_t accum_i = accum; for (size_type j = 0; j < M_; ++j) { for (size_type i = j; i < M_; ++i) { accum_i += i; size_t pos = j + accum_i; vari_ref_A_[pos] = A.coeffRef(i, j).vi_; vari_ref_L_[pos] = new vari(L_A.coeffRef(i, j), false); } accum += j; accum_i = accum; } } /** * Reverse mode differentiation algorithm reference: * * <NAME>. An extended collection of matrix derivative results for * forward and reverse mode AD. Jan. 2008. * * Note algorithm as laid out in Giles is row-major, so Eigen::Matrices * are explicitly storage order RowMajor, whereas Eigen defaults to * ColumnMajor. Also note algorithm starts by calculating the adjoint for * A(M_ - 1, M_ - 1), hence pos on line 94 is decremented to start at pos * = M_ * (M_ + 1) / 2. 
*/ virtual void chain() { using Eigen::Matrix; using Eigen::RowMajor; Matrix<double, -1, -1, RowMajor> adjL(M_, M_); Matrix<double, -1, -1, RowMajor> LA(M_, M_); Matrix<double, -1, -1, RowMajor> adjA(M_, M_); size_t pos = 0; for (size_type i = 0; i < M_; ++i) { for (size_type j = 0; j <= i; ++j) { adjL.coeffRef(i, j) = vari_ref_L_[pos]->adj_; LA.coeffRef(i, j) = vari_ref_L_[pos]->val_; ++pos; } } --pos; for (int i = M_ - 1; i >= 0; --i) { for (int j = i; j >= 0; --j) { if (i == j) { adjA.coeffRef(i, j) = 0.5 * adjL.coeff(i, j) / LA.coeff(i, j); } else { adjA.coeffRef(i, j) = adjL.coeff(i, j) / LA.coeff(j, j); adjL.coeffRef(j, j) -= adjL.coeff(i, j) * LA.coeff(i, j) / LA.coeff(j, j); } for (int k = j - 1; k >= 0; --k) { adjL.coeffRef(i, k) -= adjA.coeff(i, j) * LA.coeff(j, k); adjL.coeffRef(j, k) -= adjA.coeff(i, j) * LA.coeff(i, k); } vari_ref_A_[pos--]->adj_ += adjA.coeffRef(i, j); } } } }; #ifdef STAN_OPENCL class cholesky_opencl : public vari { public: int M_; vari** vari_ref_A_; vari** vari_ref_L_; /** * Constructor for OpenCL Cholesky function. * * Stores varis for A. Instantiates and stores varis for L. * Instantiates and stores dummy vari for upper triangular part of var * result returned in cholesky_decompose function call * * variRefL aren't on the chainable autodiff stack, only used for storage * and computation. Note that varis for L are constructed externally in * cholesky_decompose. * * @param A matrix * @param L_A Cholesky factor of A */ cholesky_opencl(const Eigen::Matrix<var, -1, -1>& A, const Eigen::Matrix<double, -1, -1>& L_A) : vari(0.0), M_(A.rows()), vari_ref_A_(ChainableStack::instance_->memalloc_.alloc_array<vari*>( A.rows() * (A.rows() + 1) / 2)), vari_ref_L_(ChainableStack::instance_->memalloc_.alloc_array<vari*>( A.rows() * (A.rows() + 1) / 2)) { size_t pos = 0; for (size_type j = 0; j < M_; ++j) { for (size_type i = j; i < M_; ++i) { vari_ref_A_[pos] = A.coeffRef(i, j).vi_; vari_ref_L_[pos] = new vari(L_A.coeffRef(i, j), false); ++pos; } } } /** * Symbolic adjoint calculation for Cholesky factor A * * @param L Cholesky factor */ inline void symbolic_rev(matrix_cl<var>& L) { L.adj() = transpose(L.val()) * L.adj(); L.adj().triangular_transpose<TriangularMapCL::LowerToUpper>(); L.val() = transpose(tri_inverse(L.val())); L.adj() = L.val() * transpose(L.val() * L.adj()); L.adj().triangular_transpose<TriangularMapCL::LowerToUpper>(); } /** * Reverse mode differentiation algorithm using OpenCL * * Reference: * * <NAME>: Differentiation of the Cholesky decomposition, 2016. 
* */ virtual void chain() { const int packed_size = M_ * (M_ + 1) / 2; std::vector<double> L_adj_cpu(packed_size); matrix_cl<var> L = packed_copy<matrix_cl_view::Lower>(vari_ref_L_, M_); int block_size = M_ / opencl_context.tuning_opts().cholesky_rev_block_partition; block_size = std::max(block_size, 8); block_size = std::min( block_size, opencl_context.tuning_opts().cholesky_rev_min_block_size); // The following is an OpenCL implementation of // the chain() function from the cholesky_block // vari class implementation for (int k = M_; k > 0; k -= block_size) { const int j = std::max(0, k - block_size); const int k_j_ind = k - j; const int m_k_ind = M_ - k; matrix_cl<var> R(k_j_ind, j, matrix_cl_view::Lower); matrix_cl<var> D(k_j_ind, k_j_ind, matrix_cl_view::Lower); matrix_cl<var> B(m_k_ind, j); matrix_cl<var> C(m_k_ind, k_j_ind, matrix_cl_view::Lower); R.sub_block(L, j, 0, 0, 0, k_j_ind, j); D.sub_block(L, j, j, 0, 0, k_j_ind, k_j_ind); B.sub_block(L, k, 0, 0, 0, m_k_ind, j); C.sub_block(L, k, j, 0, 0, m_k_ind, k_j_ind); C.adj() = C.adj() * tri_inverse(D.val()); B.adj() = B.adj() - C.adj() * R.val(); D.adj() = D.adj() - transpose(C.adj()) * C.val(); symbolic_rev(D); R.adj() = R.adj() - transpose(C.adj()) * B.val() - D.adj() * R.val(); D.adj() = diagonal_multiply(D.adj(), 0.5); L.adj().sub_block(R.adj(), 0, 0, j, 0, k_j_ind, j); L.adj().sub_block(D.adj(), 0, 0, j, j, k_j_ind, k_j_ind); L.adj().sub_block(B.adj(), 0, 0, k, 0, m_k_ind, j); L.adj().sub_block(C.adj(), 0, 0, k, j, m_k_ind, k_j_ind); } L.view(matrix_cl_view::Lower); L_adj_cpu = packed_copy(L); for (size_type j = 0; j < packed_size; ++j) { vari_ref_A_[j]->adj_ += L_adj_cpu[j]; } } }; #endif /** * Reverse mode specialization of Cholesky decomposition * * Internally calls Eigen::LLT rather than using * stan::math::cholesky_decompose in order to use an inplace decomposition. * * Note chainable stack varis are created below in Matrix<var, -1, -1> * * @param A Matrix * @return L Cholesky factor of A */ template <typename T, require_eigen_vt<is_var, T>* = nullptr> inline Eigen::Matrix<var, T::RowsAtCompileTime, T::ColsAtCompileTime> cholesky_decompose(const T& A) { Eigen::Matrix<double, T::RowsAtCompileTime, T::ColsAtCompileTime> L_A( value_of_rec(A)); check_not_nan("cholesky_decompose", "A", L_A); #ifdef STAN_OPENCL L_A = cholesky_decompose(L_A); #else check_symmetric("cholesky_decompose", "A", L_A); Eigen::LLT<Eigen::Ref<Eigen::MatrixXd>, Eigen::Lower> L_factor(L_A); check_pos_definite("cholesky_decompose", "A", L_factor); #endif // Memory allocated in arena. 
// cholesky_scalar gradient faster for small matrices compared to // cholesky_block vari* dummy = new vari(0.0, false); Eigen::Matrix<var, T::RowsAtCompileTime, T::ColsAtCompileTime> L(A.rows(), A.cols()); if (L_A.rows() <= 35) { cholesky_scalar* baseVari = new cholesky_scalar(A, L_A); size_t accum = 0; size_t accum_i = accum; for (size_type j = 0; j < L.cols(); ++j) { for (size_type i = j; i < L.cols(); ++i) { accum_i += i; size_t pos = j + accum_i; L.coeffRef(i, j).vi_ = baseVari->vari_ref_L_[pos]; } for (size_type k = 0; k < j; ++k) { L.coeffRef(k, j).vi_ = dummy; } accum += j; accum_i = accum; } } else { #ifdef STAN_OPENCL if (L_A.rows() > opencl_context.tuning_opts().cholesky_size_worth_transfer) { cholesky_opencl* baseVari = new cholesky_opencl(A, L_A); internal::set_lower_tri_coeff_ref(L, baseVari->vari_ref_L_); } else { cholesky_block* baseVari = new cholesky_block(A, L_A); internal::set_lower_tri_coeff_ref(L, baseVari->vari_ref_L_); } #else cholesky_block* baseVari = new cholesky_block(A, L_A); internal::set_lower_tri_coeff_ref(L, baseVari->vari_ref_L_); #endif } return L; } } // namespace math } // namespace stan #endif
Acid-base balance in field cases of bovine babesiosis

Correspondence to Dr Doherty

BOVINE babesiosis is a tickborne disease of worldwide economic importance characterised by fever, haemoglobinuria and anaemia. In Ireland, the aetiological agent is the intraerythrocytic protozoan parasite, Babesia divergens, which is transmitted by the hard tick, Ixodes ricinus. The disease is particularly prevalent in the west of Ireland, where the high annual rainfall and rough grazing areas provide a suitable habitat for the vector (Gray and Harte 1985). Infection with the parasite is also common in parts of south-west England (Soldan 1999), where high mortality rates have occasionally been recorded (Cranwell 1994).

Metabolic acidosis has been reported in dogs infected with Babesia canis (Button 1976, Malherbe and others 1976). In contrast, alkalaemia characterised by elevated blood bicarbonate and base levels was recorded in a small group of cattle experimentally infected with Babesia bovis (Wright and others 1982). This short communication describes a study carried out to determine the acid-base status in field cases of bovine babesiosis, using both urine pH and blood gas analysis. To the authors' knowledge, there have been no previous studies of the acid-base status in cattle infected with B divergens.

Samples were collected from cattle on smallholdings in west County Clare along the Atlantic seaboard, where babesiosis is endemic and the predominantly rough pasture provides an ideal tick habitat. The study sites were the farms of clients of two cooperating veterinary practices. One hundred and forty-seven cattle were included in the study; all had haemoglobinuria consistent with a clinical diagnosis of babesiosis, and all were positive for B divergens on a Leishman-stained blood smear. All of the clinically affected cattle and the 92 healthy animals used as controls were between one and three years of age, were at pasture at the time of sampling, and all samples were collected between May and June over a two-year period.

The urinalysis strips (BM-Test-8; Boehringer Mannheim) used in the study were initially tested for pH on a range of buffer solutions, on urine collected from nine clinical cases of babesiosis and on urine collected from 16 healthy cattle, and the results were compared with those obtained using a benchtop pH meter (Orion pH meter, model 420 A; Allometrics) and a portable pH meter (pH Pro; Rototherm). Using a chi-squared analysis, the difference in pH values recorded using the pH meters and the urinalysis strips was not statistically significant (P>0.05).
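For illustration only, a minimal sketch (with hypothetical counts, not the study's data) of how strip and meter pH readings could be compared with a chi-squared test after binning readings into pH bands:

# Hypothetical example: number of urine samples in each pH band as read by the
# test strips versus the benchtop meter. Illustrative values only.
from scipy.stats import chi2_contingency

#               pH<6  pH 6-7  pH 7-8  pH>8
strip_counts = [3,    8,      10,     4]
meter_counts = [4,    7,      10,     4]

chi2, p_value, dof, expected = chi2_contingency([strip_counts, meter_counts])
print(f"chi2={chi2:.3f}, p={p_value:.3f}")
# A p-value above 0.05 would indicate no statistically significant difference
# between the two methods, as reported for the strips in the study.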
def subtract_spike_train(self, spt):
    present_units = np.unique(spt[:, 1])
    for i in present_units:
        conv_res_len = self.n_time * 2 - 1
        unit_sp = spt[spt[:, 1] == i, :]
        spt_idx = np.arange(0, conv_res_len) + unit_sp[:, :1]
        unit_idx = self.unit_overlap[i]
        idx = np.ix_(unit_idx, spt_idx.ravel())
        self.obj[idx] -= np.tile(
            2 * self.pairwise_conv[self.up_up_map[i]], len(unit_sp))

    self.enforce_refractory(spt)
// copied from https://github.com/developit/preact-worker-demo/blob/bac36d7c34b241e4c041bcbdefaef77bcc5f367e/src/lib/undom.js import { assign, toLower, splice, findWhere, setImmediate, createAttributeFilter, } from './util'; /* const NODE_TYPES = { ELEMENT_NODE: 1, ATTRIBUTE_NODE: 2, TEXT_NODE: 3, CDATA_SECTION_NODE: 4, ENTITY_REFERENCE_NODE: 5, COMMENT_NODE: 6, PROCESSING_INSTRUCTION_NODE: 7, DOCUMENT_NODE: 9 }; */ export interface UndomOptions { /** * The list of event names that should be added to the Node class. */ builtinEvents?: string[]; } /** Create a minimally viable DOM Document * @returns {Document} document */ export default function undom(options: UndomOptions = {}): globalThis.Document { let observers = [] as MutationObserver[], pendingMutations = false; let pauseMutations = false; class Node { nodeType: number; nodeName: string; childNodes: Node[]; parentNode: Node; children?: Element[]; constructor(nodeType: number, nodeName: string) { this.nodeType = nodeType; this.nodeName = nodeName; this.childNodes = []; } get nextSibling(): Node { if (this.parentNode) { let siblingIndex = this.parentNode.childNodes.indexOf(this) + 1; if (siblingIndex < this.parentNode.childNodes.length) { return this.parentNode.childNodes[siblingIndex]; } } return null; } get previousSibling(): Node { if (this.parentNode) { let siblingIndex = this.parentNode.childNodes.indexOf(this) - 1; if (siblingIndex >= 0) { return this.parentNode.childNodes[siblingIndex]; } } return null; } appendChild(child: Node) { try { if (child.parentNode === this) { pauseMutations = true; } child.remove(); } finally { if (child.parentNode === this) { pauseMutations = false; } } child.parentNode = this; this.childNodes.push(child); if (this.children && child.nodeType === 1) this.children.push(<Element>child); mutation(this, 'childList', { addedNodes: [child], removedNodes: [], previousSibling: this.childNodes[this.childNodes.length - 2], }); } insertBefore(child: Node, ref: Node) { child.remove(); let i = splice(this.childNodes, ref, child), ref2; if (!ref) { this.appendChild(child); } else { if (~i && child.nodeType === 1) { while ( (i < this.childNodes.length && (ref2 = this.childNodes[i]).nodeType !== 1) || ref === child ) i++; if (ref2) splice(this.children, ref, child); } child.parentNode = this; mutation(this, 'childList', { addedNodes: [child], removedNodes: [], nextSibling: ref, }); } } replaceChild(child: Node, ref: Node) { if (ref.parentNode === this) { this.insertBefore(child, ref); ref.remove(); } } removeChild(child: Node) { let i = splice(this.childNodes, child); child.parentNode = null; if (child.nodeType === 1) { splice(this.children, child); } mutation(this, 'childList', { addedNodes: [], removedNodes: [child], previousSibling: this.childNodes[i - 1], nextSibling: this.childNodes[i], }); } remove() { if (this.parentNode) { this.parentNode.removeChild(this); } } } if (options.builtinEvents) { for (let event of options.builtinEvents) { Object.defineProperty(Node.prototype, event, { get: () => {}, set: () => {}, enumerable: true, }); } } class Text extends Node { private _data: string; data: string; constructor(text: string) { super(3, '#text'); // TEXT_NODE // this.textContent = this.nodeValue = text; this._data = text; Object.defineProperty(this, 'data', { get: () => this._data, set: (text) => { const oldValue = this._data; this._data = text; mutation(this, 'characterData', { oldValue }); }, enumerable: true, }); } get textContent() { return this.data; } set textContent(value) { this.data = value; } get 
nodeValue() { return this.data; } set nodeValue(value) { this.data = value; } } interface Attr {} class Element extends Node { private _style: any; attributes: Attr[]; style: any; __handlers: any; namespace: string; private _createStyleProxy(value: any): any { return new Proxy(value, { set: (target, key, value) => { let result = Reflect.set(target, key, value); this.setAttribute('style', this._style); return result; }, }); } constructor(nodeType: number, nodeName: string) { super(nodeType || 1, nodeName); // ELEMENT_NODE this.attributes = []; this.children = []; this.__handlers = {}; this._style = this._createStyleProxy({}); Object.defineProperty(this, 'style', { get: () => this._style, set: (style) => { this._style = this._createStyleProxy(style); this.setAttribute('style', this._style); }, enumerable: true, }); Object.defineProperty(this, 'className', { set: (val) => { this.setAttribute('class', val); }, get: () => this.getAttribute('style'), }); Object.defineProperty(this.style, 'cssText', { set: (val) => { this.setAttribute('style', val); }, get: () => this.getAttribute('style'), }); } setAttribute(key: string, value: any) { this.setAttributeNS(null, key, value); } getAttribute(key: string) { return this.getAttributeNS(null, key); } removeAttribute(key: string) { this.removeAttributeNS(null, key); } setAttributeNS(ns: string, name: string, value: any) { let attr = findWhere( this.attributes, createAttributeFilter(ns, name) ), oldValue = attr && attr.value; if (!attr) this.attributes.push((attr = { ns, name })); attr.value = typeof value === 'object' ? { ...value } : String(value ?? ''); mutation(this, 'attributes', { attributeName: name, attributeNamespace: ns, oldValue, }); } getAttributeNS(ns: string, name: string) { let attr = findWhere( this.attributes, createAttributeFilter(ns, name) ); return attr && attr.value; } removeAttributeNS(ns: string, name: string) { splice(this.attributes, createAttributeFilter(ns, name)); mutation(this, 'attributes', { attributeName: name, attributeNamespace: ns, oldValue: this.getAttributeNS(ns, name), }); } addEventListener(type: string, handler: (event: Event) => void) { (this.__handlers[type] || (this.__handlers[type] = [])).push( handler ); mutation(null, 'event_listener', { listenerName: type, listenerDelta: 1, }); } removeEventListener(type: string, handler: (event: Event) => void) { let index = splice(this.__handlers[type], handler, undefined, true); if (index >= 0) { mutation(null, 'event_listener', { listenerName: type, listenerDelta: -1, }); } } dispatchEvent(event: Event) { let t = (event.currentTarget = this as Element), c = event.cancelable, l, i; do { l = t.__handlers && t.__handlers[toLower(event.type)]; if (l) for (i = l.length; i--; ) { if ((l[i].call(t, event) === false || event._end) && c) break; } } while ( event.bubbles && !(c && event._stop) && (event.target = t = t.parentNode as Element) ); return !event.defaultPrevented; } } class SVGElement extends Element {} class Document extends Element { defaultView: any; body: Element; head: Element; constructor() { super(9, '#document'); // DOCUMENT_NODE } } class Event { type: string; bubbles: boolean; cancelable: boolean; defaultPrevented: boolean; currentTarget: any; target: any; _stop: boolean; _end: boolean; constructor( type: string, opts: { bubbles?: boolean; cancelable?: boolean } ) { this.type = type; this.bubbles = !!opts?.bubbles; this.cancelable = !!opts?.cancelable; } stopPropagation() { this._stop = true; } stopImmediatePropagation() { this._end = this._stop = true; } 
preventDefault() { this.defaultPrevented = true; } } interface MutationRecord { target: Node; type: string; addedNodes: Node[]; removedNodes: Node[]; previousSibling: Node; nextSibling: Node; oldValue: any; attributeName: string; attributeNamespace: string; } function mutation( target: Node, type: string, record: Partial<MutationRecord> & { /** * The name of the event listener. */ listenerName?: string; /** * The number of event listeners that were added (positive number) or removed (negative number). */ listenerDelta?: number; } ) { if (pauseMutations) { return; } record.target = target; record.type = type; for (let i = observers.length; i--; ) { let ob = observers[i], match = (!target && ob._options.subtree) || target === ob._target; if (!match && ob._options.subtree) { do { if ((match = target === ob._target)) break; } while ((target = target.parentNode)); } if (match) { ob._records.push(record as MutationRecord); if (!pendingMutations) { pendingMutations = true; setImmediate(flushMutations); } } } } function flushMutations() { pendingMutations = false; for (let i = observers.length; i--; ) { let ob = observers[i]; if (ob._records.length) { ob.callback(ob.takeRecords()); } } } class MutationObserver { callback: (records: MutationRecord[]) => void; _records: MutationRecord[]; _target: any; _options: any; constructor(callback: (records: MutationRecord[]) => void) { this.callback = callback; this._records = []; } observe(target: any, options: any) { this.disconnect(); this._target = target; this._options = options || {}; observers.push(this); } disconnect() { this._target = null; splice(observers, this); } takeRecords() { return this._records.splice(0, this._records.length); } } function createElement(type: string) { return new Element(null, String(type).toUpperCase()); } function createElementNS(ns: string, type: string) { let element = createElement(type); element.namespace = ns; return element; } function createTextNode(text: string) { return new Text(text); } function createDocument() { let document = new Document(); assign( document, (document.defaultView = { document, MutationObserver, Document, Node, Text, Element, SVGElement, Event, }) ); assign(document, { documentElement: document, createElement, createElementNS, createTextNode, }); document.appendChild((document.head = createElement('head'))); document.appendChild((document.body = createElement('body'))); return document; } return createDocument() as any; }
/// <reference types="node" />
import Timer = NodeJS.Timer;

export default class TaskRunner {
    id: Timer;
    fn: Function;
    callbacks: (() => any)[];
    constructor(fn?: Function);
    delay(delay: number, fn?: Function, callback?: () => any): Promise<unknown>;
    run(interval: number, fn?: Function, callback?: () => any): Promise<unknown>;
    start(once: boolean, interval: number, fn?: Function, callback?: () => any): Promise<unknown>;
    cancel(): this;
}
/// Obtains the node and attributes of an underlying file immediately after its creation.
///
/// `writable` and `state` are the properties of the node, passed in as arguments because we
/// have to hold the node locked already.
///
/// `path` and `name` are the path to the underlying file and the basename to lookup in the
/// directory, respectively.  It is expected that the basename of `path` matches `name`.
///
/// `exp_type` is the type of the node we expect to find for the just-created file.  If the
/// node doesn't match this type, it means we encountered a race on the underlying file system
/// and we fail the lookup.  (This is an artifact of how we currently implement this function
/// as this condition should just be impossible.)
fn post_create_lookup(writable: bool, state: &mut MutableDir, path: &Path, name: &OsStr,
    exp_type: fuse::FileType, ids: &IdGenerator, cache: &dyn Cache)
    -> NodeResult<(ArcNode, fuse::FileAttr)> {
    debug_assert_eq!(path.file_name().unwrap(), name);

    // TODO(https://github.com/bazelbuild/sandboxfs/issues/43): We abuse lookup here to handle
    // the node creation and the child insertion into the directory, but we shouldn't do this
    // because lookup performs an extra stat that we should not be issuing.  But to resolve this
    // we need to be able to synthesize the returned attr, which means we need to track ctimes
    // internally.
    match Dir::lookup_locked(writable, state, name, ids, cache) {
        Ok((node, attr)) => {
            if node.file_type_cached() != exp_type {
                warn!("Newly-created file {} was replaced or deleted before create finished",
                    path.display());
                return Err(KernelError::from_errno(errno::Errno::EIO));
            }
            Ok((node, attr))
        },
        Err(e) => {
            if let Err(e) = fs::remove_file(&path) {
                warn!("Failed to clean up newly-created {}: {}", path.display(), e);
            }
            Err(e)
        }
    }
}
import { Injectable } from '@angular/core'; import { Observable, of, throwError } from 'rxjs'; import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http'; import { catchError, tap, map } from 'rxjs/operators'; import { environment } from 'src/environments/environment.prod'; const httpOptions = { headers: new HttpHeaders({'Content-Type': 'application/json'}) }; const baseUrl = environment.baseUrl; @Injectable({ providedIn: 'root' }) export class RestApiService { url: string; constructor(private http: HttpClient) { } private handleError(error: HttpErrorResponse) { if (error.error instanceof ErrorEvent) { // A client-side or network error occurred. Handle it accordingly. console.error('An error occurred:', error.error.message); } else { // The backend returned an unsuccessful response code. // The response body may contain clues as to what went wrong, console.error( `Backend returned code ${error.status}, ` + `body was: ${error.error}`); } // return an observable with a user-facing error message return throwError('Something bad happened; please try again later.'); } private extractData(res: Response) { let body = res; console.log('@@@ Response: '+JSON.stringify(body)); return body || { }; } // ------------------------------------ my coding starts from here ---------------------------------- // get user by userid getuserbyuserid(userid): Observable<any>{ return this.http.get(baseUrl+'getuserbyuserid/'+userid, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // student registration registernewstudent(data): Observable<any>{ return this.http.post(baseUrl+'registernewstudent', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // student list of a specific teacher getallstudentsbyteacher(_userid):Observable<any>{ this.url = baseUrl+'getallstudentsbyteacher/'+_userid; this.show(this.url); return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get baselinetest getbaselinetestquestionset(data):Observable<any>{ this.url = baseUrl+'getbaselinetestquestionset'; this.show(this.url); return this.http.post(this.url, data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // setlevel setlevelbyid(data): Observable<any>{ return this.http.post(baseUrl+'setlevelbyid', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // update level updatelevelbyid(id,data): Observable<any>{ return this.http.put(baseUrl+'updatelevelbyid/'+id, data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get all student list by teacher id getallstudentsbyteacherid(userid):Observable<any>{ this.url = baseUrl+'getallstudentsbyteacherid/'+userid; this.show(this.url); return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get attendance by teacher id and date getattendanceofteacherbydate(userid, attendancedate):Observable<any>{ this.url = baseUrl+'getattendanceofteacherbydate/'+userid+'/'+attendancedate; this.show(this.url); return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save attendance saveattendance(data): Observable<any>{ return this.http.post(baseUrl+'saveattendance', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // view payment records getalltchpaymentdetailsbystudentid(studentid):Observable<any>{ this.url = baseUrl+'getalltchpaymentdetailsbystudentid/'+studentid; 
this.show(this.url); return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // make payment savetchpaymentdetails(data): Observable<any>{ return this.http.post(baseUrl+'savetchpaymentdetails', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get tch assessment gettchassessment(program, clas, stage, subject):Observable<any>{ this.url = baseUrl+'gettchassessment/'+program+'/'+clas+'/'+stage+'/'+subject; this.show(this.url); return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get tch assessmenttest gettchassessmenttest(studentid, program, clas, stage, subject):Observable<any>{ this.url = baseUrl+'gettchassessmenttest/'+studentid+'/'+program+'/'+clas+'/'+stage+'/'+subject; this.show(this.url); return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save assessment test for each student createtchassessmenttest(data): Observable<any>{ return this.http.post(baseUrl+'createtchassessmenttest', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get distinct activities getmasteractivities(program, subject, month, week): Observable<any>{ return this.http.get(baseUrl+'getmasteractivities/'+program+'/'+subject+'/'+month+'/'+week, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get activity details getmasteractivitiydetails(program, subject, month, week, activity): Observable<any>{ return this.http.get(baseUrl+'getmasteractivitiydetails/'+program+'/'+subject+'/'+month+'/'+week+'/'+activity, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get master activity by userid gettchactivitiydetails(userid, program, subject, month, week, activity): Observable<any>{ return this.http.get(baseUrl+'gettchactivitiydetails/'+userid+'/'+program+'/'+subject+'/'+month+'/'+week+'/'+activity, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get tch activity by userid gettchactivitybyuser(userid, program, subject, month, week): Observable<any>{ return this.http.get(baseUrl+'gettchactivitybyuser/'+userid+'/'+program+'/'+subject+'/'+month+'/'+week, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save tch activity savetchactivity(data): Observable<any>{ return this.http.post(baseUrl+'savetchactivity', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get fcm token all getallfcmtokenids(): Observable<any>{ return this.http.get(baseUrl+'getallfcmtokenids', httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get fcm token by userid getfcmtokenidbyuserid(userid): Observable<any>{ return this.http.get(baseUrl+'getfcmtokenidbyuserid/'+userid, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save fcm token createnewfcmtokenid(data): Observable<any>{ return this.http.post(baseUrl+'createnewfcmtokenid', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // update fcm token updatefcmtokenid(id, data): Observable<any>{ return this.http.put(baseUrl+'updatefcmtokenid/'+id, data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // Teacher training // get all modules getalltrainingmodules(): Observable<any>{ return this.http.get(baseUrl+'getalltrainingmodules', httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get all submodules of 
a module getalltrainingsubmodules(moduleid): Observable<any>{ return this.http.get(baseUrl+'getalltrainingsubmodules/'+moduleid, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get all training contents getalltrainingcontents(moduleid, submoduleid): Observable<any>{ return this.http.get(baseUrl+'getalltrainingcontents/'+moduleid+'/'+submoduleid, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // authenticate user authenticateuser(data): Observable<any>{ return this.http.post(baseUrl+'authenticateuser', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // getcurrentdate getcurrentdate():Observable<any>{ this.url = baseUrl+'getcurrentdate'; return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // getallcentersallocatedbyuserid getallcentersallocatedbyuserid(_userid):Observable<any>{ this.url = baseUrl+'getallcentersallocatedbyuserid/'+_userid; this.show(this.url); return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // getcenter feedback getallcenterfeedback():Observable<any>{ this.url = baseUrl+'getallcenterfeedback'; return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // savedailyinfo savedailyinfo(data): Observable<any>{ return this.http.post(baseUrl+'savedailyinfo', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // savecenterimage savecenterimage(data): Observable<any>{ return this.http.post(baseUrl+'savecenterimage', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // savegeolocation savegeolocation(data): Observable<any>{ return this.http.post(baseUrl+'savegeolocation', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save manager center feedback createcenterfeedbackmgr(data): Observable<any>{ return this.http.post(baseUrl+'createcenterfeedbackmgr', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get all paymentinfo getallpaymentinfo():Observable<any>{ this.url = baseUrl+'getallpaymentinfo'; return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save manager paymentinfo createpaymentinfomgr(data): Observable<any>{ return this.http.post(baseUrl+'createpaymentinfomgr', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get all assessment getallassessment():Observable<any>{ this.url = baseUrl+'getallassessment'; return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save manager assessment createassessmentmgr(data): Observable<any>{ return this.http.post(baseUrl+'createassessmentmgr', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get all community getallcommunityvisit():Observable<any>{ this.url = baseUrl+'getallcommunityvisit'; return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save manager community createcommunitymgr(data): Observable<any>{ return this.http.post(baseUrl+'createcommunitymgr', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get all issue getallissues():Observable<any>{ this.url = baseUrl+'getallissues'; return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save manager 
issue createissuesmgr(data): Observable<any>{ return this.http.post(baseUrl+'createissuesmgr', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // save daily expenses savedailyexpense(data): Observable<any>{ return this.http.post(baseUrl+'savedailyexpense', data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // get messages by userid getmessagesbyuserid(userid):Observable<any>{ this.url = baseUrl+'getmessagesbyuserid/'+userid; return this.http.get(this.url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } // update message status by id updatemessagebyid(id, data):Observable<any>{ this.url = baseUrl+'updatemessagebyid/'+id; return this.http.put(this.url, data, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } //------------------------------------------------------------------------- getClassroom(): Observable<any> { return this.http.get(baseUrl, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } getClassroomById(id: string): Observable<any> { const url = `${baseUrl}/${id}`; return this.http.get(url, httpOptions).pipe( map(this.extractData), catchError(this.handleError)); } postClassroom(data): Observable<any> { const url = `${baseUrl}/add_with_students`; return this.http.post(url, data, httpOptions) .pipe( catchError(this.handleError) ); } updateClassroom(id: string, data): Observable<any> { const url = `${baseUrl}/${id}`; return this.http.put(url, data, httpOptions) .pipe( catchError(this.handleError) ); } deleteClassroom(id: string): Observable<{}> { const url = `${baseUrl}/${id}`; return this.http.delete(url, httpOptions) .pipe( catchError(this.handleError) ); } show(url: string){ console.log('@@@ url: '+url); } }
/**
 * It is responsible for setting the rule for group by sentence.
 *
 * @param grammarBuilder ApexGrammarBuilder parameter.
 */
private static void groupBySentence(LexerfulGrammarBuilder grammarBuilder) {
    grammarBuilder.rule(GROUP_BY_SENTENCE).is(
            GROUP,
            BY,
            grammarBuilder.firstOf(
                    SOQL_NAME,
                    GROUP_BY_TYPES,
                    DATE_METHOD_EXPR,
                    AGGREGATE_EXPR
            ),
            grammarBuilder.zeroOrMore(COMMA,
                    grammarBuilder.firstOf(
                            SOQL_NAME,
                            GROUP_BY_TYPES,
                            DATE_METHOD_EXPR,
                            AGGREGATE_EXPR
                    ))
    );
}
package str;

import java.util.Arrays;

public class Expression {

    public static void main(String args[]) {
        String exp = "1 + 2 - 3 * 5554 / 15 ";
        System.out.println(Arrays.toString(exp.split("\\D+")));
        System.out.println(Arrays.toString(exp.split("\\s*\\d+\\s*")));
        System.out.println(Arrays.asList("We think we can".toLowerCase().split("we")));
        System.out.println("We think we can".toLowerCase().replace("we", "you"));
    }
}
s = input()


def main(string):
    c = "keyence"
    head = string[:7]
    idx = 0
    for ch in head:
        if ch == c[idx]:
            idx += 1
        else:
            break
    if idx == 7:
        print("YES")
        return
    tail = string[-7 + idx:]
    for ch in tail:
        if ch == c[idx]:
            idx += 1
        else:
            break
    if idx == 7:
        print("YES")
    else:
        print("NO")


main(s)
#ifndef __BST_H__
#define __BST_H__

#include <iostream>
#include <string>
#include <list>
#include <vector>
#include <memory>
#include <iterator>
#include <cstdint>

template <typename KeyType = std::string, typename ValueType = int>
class Node
{
public:
  KeyType key;
  ValueType value;
  bool color;
  uint32_t count;
  std::shared_ptr<Node<KeyType, ValueType>> left;
  std::shared_ptr<Node<KeyType, ValueType>> right;

public:
  Node() : color(false), count(0), left(nullptr), right(nullptr) {}
  Node(const KeyType &_key, const ValueType &_val)
    : key(_key), value(_val), color(false), count(0), left(nullptr), right(nullptr) {}
  Node(const KeyType &_key, const ValueType &_val, bool _color, uint32_t _count,
       const std::shared_ptr<Node<KeyType, ValueType>> &_left,
       const std::shared_ptr<Node<KeyType, ValueType>> &_right)
    : key(_key), value(_val), color(_color), count(_count), left(_left), right(_right) {}
  ~Node() {}
};

template <typename KeyType = std::string, typename ValueType = int>
class BST
{
private:
  std::shared_ptr<Node<KeyType, ValueType>> root;

public:
  BST() {}
  ~BST() {}
  // put key-value pair into the table (remove key from table if value is null)
  void put(const KeyType &key, const ValueType &val)
  {
  }
  // value paired with key (null if key is absent)
  ValueType &get(const KeyType &key)
  {
  }
  // remove key (and its value) from table
  void remove(const KeyType &key)
  {
  }
  // is there a value paired with key?
  bool contains(const KeyType &key)
  {
    return (get(key) != nullptr);
  }
  // is the table empty?
  bool is_empty()
  {
    return (size() == 0);
  }
  // number of key-value pairs in the table
  uint32_t size()
  {
    // root is a shared_ptr, so guard against an empty tree before dereferencing
    return (root == nullptr) ? 0 : root->count;
  }
  // all the keys in the table
  // std::iterator keys()
  // smallest key
  KeyType min()
  {
  }
  // largest key
  KeyType max()
  {
  }
  // largest key less than or equal to key
  KeyType floor(const KeyType &key)
  {
  }
  // smallest key greater than or equal to key
  KeyType ceiling(const KeyType &key)
  {
  }
  // number of keys less than key
  int rank(const KeyType &key)
  {
  }
  // key of rank k
  KeyType select(int k)
  {
  }
  // delete smallest key
  void removeMin()
  {
  }
  // delete largest key
  void removeMax()
  {
  }
  // number of keys in [lo..hi]
  int size(const KeyType &lo, const KeyType &hi)
  {
  }
  // keys in [lo..hi], in sorted order
  // Iterable<Key> keys(const KeyType &lo, const KeyType &hi)
  ValueType &operator[](const KeyType &key)
  {
  }
};

#endif // __BST_H__
Ivanka Trump’s clothing brand does not provide industry-standard oversight for its factory workers, the Washington Post found. “The time to change the narrative around women and work once and for all is long overdue; in fact, it’s become my life’s mission,” Ivanka Trump wrote in her book “Women Who Work.” While Trump works on the “narrative,” the thousands of women in Bangladesh, Indonesia and China who make garments for her eponymous clothing brand work in under-regulated conditions. Trump has not ensured that industry-standard regulation of worker conditions takes place at factories contracted with her company, according to an investigation from the Washington Post. While major brands like Levi’s and Adidas, and smaller brands like Everlane, routinely employ investigators to check on the treatment of workers at their factories in poor countries, Ivanka Trump’s brand does not. The company is also shirking the emerging trend of clothing companies disclosing their factory locations. While her father boasts about bringing jobs back to the U.S., Ivanka Trump faces scrutiny over her company’s use of overseas factories. By basing her factories overseas, Trump is able to save over 70% on manufacturing costs. “As a leader and a mother, I feel it’s as much my responsibility to cultivate an environment that supports people… as it is to post profits,” Trump wrote in her book. “One cannot suffer at the expense of the other — they go hand in hand.” Contact Ari Feldman at feldman@forward.com or on Twitter @aefeldman.
package com.macro.mall.dto;

import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * Get S3 upload file authorization return result
 * Created by macro on 2018/5/17.
 */
@Data
@EqualsAndHashCode(callSuper = false)
public class AmazonS3PolicyResult {
    @ApiModelProperty("User ID used in access authentication")
    private String accessKeyId;
    @ApiModelProperty("User form upload policy, string encoded by base64")
    private String policy;
    @ApiModelProperty("Strings signed on the policy")
    private String signature;
    @ApiModelProperty("Upload folder path prefix")
    private String dir;
    @ApiModelProperty("access domain for oss external services")
    private String host;
    @ApiModelProperty("Callback settings after successful upload")
    private String callback;
}
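As a rough usage sketch, a caller could turn this result into the form fields a browser-side uploader posts directly to the storage service. The helper class below and the exact field names the service expects are assumptions made for illustration; only the DTO accessors (generated by Lombok's @Data) come from the class above.

import java.util.HashMap;
import java.util.Map;

public class PolicyFormExample {
    // Hypothetical sketch: assemble the multipart form fields for a direct browser upload.
    public static Map<String, String> toFormFields(AmazonS3PolicyResult policy, String fileName) {
        Map<String, String> fields = new HashMap<>();
        fields.put("key", policy.getDir() + fileName);       // object key under the configured prefix
        fields.put("policy", policy.getPolicy());             // base64-encoded upload policy
        fields.put("signature", policy.getSignature());       // signature computed over the policy
        fields.put("AWSAccessKeyId", policy.getAccessKeyId());
        // the form would then be POSTed to policy.getHost(); callback handling is omitted here
        return fields;
    }
}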
// Get retrieves an Entry using its key. func (o *OrderedMap) Get(key string) (iface.IPFSLogEntry, bool) { o.lock.RLock() defer o.lock.RUnlock() val, exists := o.values[key] return val, exists }
/** * Convert the key into a proper key list. * NoSQL uses a List of key values for major keys, * this allows for a special key syntax to be used, or a list. * "[<key1>,<key2>,..]" */ protected List createMajorKey(Object key) { List majorKeys = null; if (key instanceof List) { majorKeys = (List)key; } else { majorKeys = new ArrayList<String>(); String keyString = key.toString(); if ((keyString.length() > 2) && (keyString.charAt(0) == '[') && (keyString.charAt(keyString.length() - 1) == ']')) { int startIndex = 1; while (startIndex < (keyString.length() - 1)) { int endIndex = keyString.indexOf(',', startIndex); if (endIndex == -1) { endIndex = keyString.length() - 1; } String nextKey = keyString.substring(startIndex, endIndex); majorKeys.add(nextKey); startIndex = endIndex + 1; } } else { majorKeys.add(keyString); } } return majorKeys; }
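To make the bracket syntax in the comment above concrete, here is a hypothetical illustration of what the method returns for different inputs. The key values are invented, and the call is assumed to happen inside the declaring class or a subclass, since the method is protected.

// Illustrative only: expected results of createMajorKey for a few sample keys.
List composite = createMajorKey("[US,CA,94040]");            // -> ["US", "CA", "94040"]
List single    = createMajorKey("customer-42");               // -> ["customer-42"]
List passthru  = createMajorKey(Arrays.asList("US", "CA"));   // an existing List is returned as-is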
/* * Copyright 2021 Larder Software Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import React from 'react'; import { configApiRef, useApi } from '@backstage/core-plugin-api'; import { Table, TableColumn, Link, Progress } from '@backstage/core-components'; import Alert from '@material-ui/lab/Alert'; import OpenInNew from '@material-ui/icons/OpenInNew'; import { makeStyles } from '@material-ui/core'; import { useAsync } from 'react-use'; import { bugsnagApiRef } from '../..'; import { BugsnagError, Project } from '../../api/types'; import { DateTime } from 'luxon'; const useStyles = makeStyles({ iconClass: { verticalAlign: 'middle', } }); const getDetailsUrl = (errorId: string, errorClass: string, organisationName: string, projectName: string) => { // eslint-disable-next-line react-hooks/rules-of-hooks const classes = useStyles(); const url = `https://app.bugsnag.com/${organisationName}/${projectName}/errors/${errorId}`; return ( <Link to={url}> <OpenInNew onClick={() => window.open(url, '_blank')} aria-label="View" fontSize="small" className={classes.iconClass} /> {errorClass} </Link> ); } export const DenseTable = ({ errors, organisationName, projectName }: { errors: BugsnagError[], organisationName: string, projectName: string }) => { const columns: TableColumn[] = [ { title: '', field: 'class' }, { title: 'Events', field: 'events' }, { title: 'Users', field: 'users' }, { title: 'Stage', field: 'stage' }, { title: 'First seen', field: 'first_seen' }, { title: 'Last seen', field: 'last_seen' }, { title: 'Severity', field: 'severity' }, ]; const data = errors.map(error => { return { severity: error.severity, class: getDetailsUrl(error.id, error.error_class, organisationName, projectName), stage: error.release_stages, events: error.events, id: error.id, project_id: error.project_id, users: error.users, first_seen: DateTime.fromISO(error.first_seen).toLocaleString(), last_seen: DateTime.fromISO(error.last_seen).toLocaleString() }; }); return ( <Table title="Errors overview" options={{ search: true, paging: true }} columns={columns} data={data} /> ); }; export const ErrorsTable = ({ organisationName, project }: { organisationName: string, project: Project }) => { const api = useApi(bugsnagApiRef); const configApi = useApi(configApiRef); const perPage = configApi?.getOptionalNumber('bugsnag.resultsPerPage'); const { value, loading, error } = useAsync( async () => await api.fetchErrors({ projectId: project.id, perPage, }) ); if (loading) { return <Progress />; } else if (error) { return <Alert severity="error">{error.message}</Alert>; } return <DenseTable organisationName={organisationName} projectName={project.slug} errors={value || []} />; };
def one_time_step_local(self, f, fnew, m): with_rel_velocity = True if self.rel_vel_symb else False code = [self.transport_local(f, fnew)] f2m = self.f2m_local(fnew, m, with_rel_velocity) if isinstance(f2m, list): code.extend(f2m) else: code.append(f2m) if self.source_eq: code.extend(self.source_term_local(m)) code.append(self.relaxation_local(m, with_rel_velocity)) if self.source_eq: code.extend(self.source_term_local(m)) code.append(self.m2f_local(m, fnew, with_rel_velocity)) return code
def _log_sink_pb_to_mapping(sink_pb): return { 'name': sink_pb.name, 'destination': sink_pb.destination, 'filter': sink_pb.filter, }
package command

import (
	"flag"
	"fmt"
	"strings"

	"github.com/funkygao/gocli"
)

type Logstash struct {
	Ui  cli.Ui
	Cmd string
}

func (this *Logstash) Run(args []string) (exitCode int) {
	cmdFlags := flag.NewFlagSet("logstash", flag.ContinueOnError)
	cmdFlags.Usage = func() { this.Ui.Output(this.Help()) }
	if err := cmdFlags.Parse(args); err != nil {
		return 1
	}

	this.showSampleConfig()

	return
}

func (this *Logstash) showSampleConfig() {
	config := `
input {
    file {
        path => "/var/wd/ehaproxy/ehaproxy.log"
        type => "ehaproxy"
    }
    file {
        path => "/var/wd/ehaproxy/panic"
        type => "ehaproxy_panic"
    }
    file {
        path => "/var/wd/ehaproxy/logs/haproxy.log"
        type => "haproxy"
    }
    file {
        path => "/var/wd/kateway/kateway.log"
        type => "kateway"
    }
    file {
        path => "/var/wd/kateway/panic"
        type => "kateway_panic"
    }
    file {
        path => "/var/wd/kateway/audit/pub_audit.log"
        type => "pubaudit"
    }
    file {
        path => "/var/wd/kateway/audit/sub_audit.log"
        type => "subaudit"
    }
}

filter {
    multiline {
        pattern => "^201" # e,g. this line begins with 2017-01-22
        what => "previous"
        negate => true
    }
}

output {
    if [type] == "pubsub" {
        http {
            http_method => "post"
            url => "http://pub.test.mycorp.com:10191/v1/raw/msgs/cluster/topic"
            workers => 8 # each worker has 25 http connection, totals: 200
            headers => {
                "User-Agent" => "logstash"
            }
        }
    } else if [type] == "pubaudit" {
        kafka {
            bootstrap_servers => "k<EMAIL>:11003,k11003b.mycorp.kfk.com:11003"
            topic_id => "pubaudit"
        }
    } else if [type] == "subaudit" {
        kafka {
            bootstrap_servers => "k<EMAIL>a.mycorp.kfk.com:11003,k11003b.mycorp.kfk.com:11003"
            topic_id => "subaudit"
        }
    } else {
        kafka {
            bootstrap_servers => "k11003a.mycorp.kfk.com:11003,k11003b.mycorp.kfk.com:11003"
            topic_id => "pubsub_log"
            metadata_max_age_ms => 300000
            workers => 1
            retries => 1
        }
    }
}`

	this.Ui.Output(strings.TrimSpace(config))
}

func (*Logstash) Synopsis() string {
	return "Sample configuration for logstash"
}

func (this *Logstash) Help() string {
	help := fmt.Sprintf(`
Usage: %s logstash [options]

    %s
`, this.Cmd, this.Synopsis())
	return strings.TrimSpace(help)
}
package torrent

import (
	"github.com/anacrolix/torrent/metainfo"
	request_strategy "github.com/anacrolix/torrent/request-strategy"
	"github.com/anacrolix/torrent/storage"
)

type requestStrategyInput struct {
	cl      *Client
	capFunc storage.TorrentCapacity
}

func (r requestStrategyInput) Torrent(ih metainfo.Hash) request_strategy.Torrent {
	return requestStrategyTorrent{r.cl.torrents[ih]}
}

func (r requestStrategyInput) Capacity() (int64, bool) {
	if r.capFunc == nil {
		return 0, false
	}
	return (*r.capFunc)()
}

func (r requestStrategyInput) MaxUnverifiedBytes() int64 {
	return r.cl.config.MaxUnverifiedBytes
}

var _ request_strategy.Input = requestStrategyInput{}

// Returns what is necessary to run request_strategy.GetRequestablePieces for primaryTorrent.
func (cl *Client) getRequestStrategyInput(primaryTorrent *Torrent) (input request_strategy.Input) {
	return requestStrategyInput{
		cl:      cl,
		capFunc: primaryTorrent.storage.Capacity,
	}
}

func (t *Torrent) getRequestStrategyInput() request_strategy.Input {
	return t.cl.getRequestStrategyInput(t)
}

type requestStrategyTorrent struct {
	t *Torrent
}

func (r requestStrategyTorrent) Piece(i int) request_strategy.Piece {
	return requestStrategyPiece{r.t, i}
}

func (r requestStrategyTorrent) ChunksPerPiece() uint32 {
	return r.t.chunksPerRegularPiece()
}

func (r requestStrategyTorrent) PieceLength() int64 {
	return r.t.info.PieceLength
}

var _ request_strategy.Torrent = requestStrategyTorrent{}

type requestStrategyPiece struct {
	t *Torrent
	i pieceIndex
}

func (r requestStrategyPiece) Request() bool {
	return !r.t.ignorePieceForRequests(r.i)
}

func (r requestStrategyPiece) NumPendingChunks() int {
	return int(r.t.pieceNumPendingChunks(r.i))
}

var _ request_strategy.Piece = requestStrategyPiece{}
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== r"""Training script for UNet-3D.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import app from absl import flags import tensorflow.compat.v1 as tf from tensorflow.compat.v1 import estimator as tf_estimator from hyperparameters import params_dict import input_reader import tpu_executor import unet_config import unet_model tpu_executor.define_tpu_flags() flags.DEFINE_string( 'mode', 'train', 'Mode to run: train or eval or train_and_eval ' '(default: train)') flags.DEFINE_string('model_dir', None, 'Location of model_dir') flags.DEFINE_string('training_file_pattern', '', 'Location of the train data.') flags.DEFINE_string('eval_file_pattern', '', 'Location of ther eval data') flags.DEFINE_string('config_file', '', 'a YAML file which specifies overrides.') flags.DEFINE_string('params_override', '', 'A JSON-style string that specifies overrides.') flags.DEFINE_integer('min_eval_interval', 180, 'Minimum seconds between evaluations.') flags.DEFINE_integer( 'eval_timeout', None, 'Maximum seconds between checkpoints before evaluation terminates.') FLAGS = flags.FLAGS def run_executer(params, train_input_shapes=None, eval_input_shapes=None, train_input_fn=None, eval_input_fn=None): """Runs Mask RCNN model on distribution strategy defined by the user.""" executer = tpu_executor.TPUEstimatorExecuter( unet_model.unet_model_fn, params, train_input_shapes=train_input_shapes, eval_input_shapes=eval_input_shapes) if FLAGS.mode == 'train': assert train_input_fn is not None results = executer.train(train_input_fn) elif FLAGS.mode == 'eval': assert eval_input_fn is not None results = executer.evaluate(eval_input_fn) elif FLAGS.mode == 'train_and_eval': assert train_input_fn is not None assert eval_input_fn is not None results = executer.train_and_eval(train_input_fn, eval_input_fn) else: raise ValueError('Mode must be one of `train`, `eval`, or `train_and_eval`') return results def main(argv): del argv # Unused. 
params = params_dict.ParamsDict(unet_config.UNET_CONFIG, unet_config.UNET_RESTRICTIONS) params = params_dict.override_params_dict( params, FLAGS.config_file, is_strict=False) if FLAGS.training_file_pattern: params.override({'training_file_pattern': FLAGS.training_file_pattern}, is_strict=True) if FLAGS.eval_file_pattern: params.override({'eval_file_pattern': FLAGS.eval_file_pattern}, is_strict=True) train_epoch_steps = params.train_item_count // params.train_batch_size eval_epoch_steps = params.eval_item_count // params.eval_batch_size params.override( { 'model_dir': FLAGS.model_dir, 'min_eval_interval': FLAGS.min_eval_interval, 'eval_timeout': FLAGS.eval_timeout, 'tpu_config': tpu_executor.get_tpu_flags(), 'lr_decay_steps': train_epoch_steps, 'train_steps': params.train_epochs * train_epoch_steps, 'eval_steps': eval_epoch_steps, }, is_strict=False) params = params_dict.override_params_dict( params, FLAGS.params_override, is_strict=True) params.validate() params.lock() train_input_fn = None eval_input_fn = None train_input_shapes = None eval_input_shapes = None if FLAGS.mode in ('train', 'train_and_eval'): train_input_fn = input_reader.LiverInputFn( params.training_file_pattern, params, mode=tf_estimator.ModeKeys.TRAIN) train_input_shapes = train_input_fn.get_input_shapes(params) if FLAGS.mode in ('eval', 'train_and_eval'): eval_input_fn = input_reader.LiverInputFn( params.eval_file_pattern, params, mode=tf_estimator.ModeKeys.EVAL) eval_input_shapes = eval_input_fn.get_input_shapes(params) assert train_input_shapes is not None or eval_input_shapes is not None run_executer(params, train_input_shapes=train_input_shapes, eval_input_shapes=eval_input_shapes, train_input_fn=train_input_fn, eval_input_fn=eval_input_fn) if __name__ == '__main__': tf.disable_v2_behavior() app.run(main)
An Islamist organization that distributes German-language copies of the Quran to passers-by has been banned by the German government who accused it of recruiting would-be jihadists to fight in Syria and Iraq. The True Religion organization, also known as Read, was outlawed Tuesday when officers raided 190 premises in more than eight German states after months of surveillance of the group, the New York Times reports. “The organization brings Islamic jihadists together under the pretext of the harmless distribution of the Quran,” German Interior Minister Thomas de Maizière told reporters in Berlin. He added that the ban had “nothing to do with the alleged freedom of religion.” The True Religion was known for handing out Qurans in pedestrianized areas across the country. According to de Maizière, 140 of the group’s supporters are known to have traveled to the Middle East to fight on behalf of ISIS. [New York Times] Write to Kate Samuelson at kate.samuelson@time.com.
#ifndef GENETICALGORITHMLIB_MEMORYMANAGER_H #define GENETICALGORITHMLIB_MEMORYMANAGER_H #include <vector> #include <cstdio> #include <iostream> #include <set> #include <memory> template <class T> class MemoryManager { private: std::vector<std::shared_ptr<T>> instances_; std::set<int> freeSlots_; int maxMemSize_ = 500; public: MemoryManager(int maxElementSize) { maxMemSize_ = maxElementSize; instances_ = std::vector<std::shared_ptr<T>>(maxMemSize_); // fill nextSlot with empty positions for (int i = 0; i < maxMemSize_; i++) { freeSlots_.insert(i); } } MemoryManager() : MemoryManager(500){}; template <class... ARGS> int create(ARGS... args) { if (freeSlots_.empty()) { return -1; } // grab an available handle from the set int handle = *freeSlots_.begin(); freeSlots_.erase(handle); if (instances_[handle].get() == nullptr) { instances_[handle].reset(new T(args...)); } return handle; } void destroy(int handle) { instances_[handle].reset(); // add this empty slot to nextSlot queue freeSlots_.insert(handle); } std::shared_ptr<T> get(int handle) { return instances_[handle]; } }; #endif //GENETICALGORITHMLIB_MEMORYMANAGER_H
/** * Delegate to the next {@code WebFilter} in the chain. * * @param exchange the current server exchange * @return {@code Mono<Void>} to indicate when request handling is complete */ @Override public Mono<Void> execute(final ServerWebExchange exchange) { return Mono.defer(() -> { if (this.index < plugins.size()) { SoulPlugin plugin = plugins.get(this.index++); Boolean skip = plugin.skip(exchange); if (skip) { return this.execute(exchange); } else { return plugin.execute(exchange, this); } } else { return Mono.empty(); } }); }
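A minimal sketch of a plugin that cooperates with this chain is shown below. It only implements the two methods the chain actually calls here, skip and execute; the logging plugin itself, its output, and any further methods the real SoulPlugin interface may declare are assumptions for illustration.

import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Mono;

// Hypothetical plugin: logs the request path, then hands control back to the chain
// so the next plugin in the list gets a chance to run.
public class LoggingPlugin implements SoulPlugin {

    @Override
    public Boolean skip(final ServerWebExchange exchange) {
        // never skip; a real plugin might inspect the request or selector data here
        return false;
    }

    @Override
    public Mono<Void> execute(final ServerWebExchange exchange, final SoulPluginChain chain) {
        System.out.println("request path: " + exchange.getRequest().getURI().getPath());
        // delegate to the chain, which advances its index and invokes the next plugin
        return chain.execute(exchange);
    }
}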
def OpenFile(self, windows_path): if self._single_file: path_spec = dfvfs_path_spec_factory.Factory.NewPathSpec( dfvfs_definitions.TYPE_INDICATOR_OS, location=self._source_path) if path_spec is None: return None return dfvfs_resolver.Resolver.OpenFileObject(path_spec) windows_path_upper = windows_path.upper() if windows_path_upper.startswith('%USERPROFILE%'): if not self._mediator: raise dfvfs_errors.ScannerError( 'Unable to proceed. %UserProfile% found in Windows path but no ' 'mediator to determine which user to select.') users_path_spec = self._path_resolver.ResolvePath('\\Users') if users_path_spec is None: raise dfvfs_errors.ScannerError( 'Unable to proceed. %UserProfile% found in Windows path but no ' 'users path found to determine which user to select.') users_file_entry = dfvfs_resolver.Resolver.OpenFileEntry(users_path_spec) self._mediator.PrintUsersSubDirectoriesOverview(users_file_entry) return super(WindowsRegistryCollector, self).OpenFile(windows_path)
/** * An output stream that can be used to flatten Storable objects. * StorableOutput preserves the object identity of the stored objects. * * @see Storable * @see StorableInput */ public class StorableOutput extends StorableInOut { public static org.apache.log4j.Logger logger = org.apache.log4j.Logger .getLogger(StorableOutput.class); private PrintWriter fStream; private int fIndent; /** * Initializes the StorableOutput with the file given by name. */ public StorableOutput(File file) throws FileNotFoundException { this(file.toURI(), new FileOutputStream(file)); } /** * Initializes the StorableOutput with the given output stream. */ public StorableOutput(OutputStream stream) { this(null, stream); } /** * Initializes the StorableOutput with the given output stream. */ private StorableOutput(URI location, OutputStream stream) { super(location); try { fStream = new PrintWriter(new OutputStreamWriter(stream, "UTF8")); } catch (UnsupportedEncodingException e) { logger.error("UTF-8 not supported!"); fStream = new PrintWriter(stream); } fIndent = 0; } /** * Writes a storable object to the output stream. */ public void writeStorable(Storable storable) { if (storable == null) { fStream.print("NULL"); space(); return; } if (mapped(storable)) { writeRef(storable); return; } incrementIndent(); startNewLine(); map(storable); fStream.print(storable.getClass().getName()); space(); storable.write(this); space(); decrementIndent(); } /** * Writes an int to the output stream. */ public void writeInt(int i) { fStream.print(i); space(); } public void writeColor(Color c) { writeInt(c.getRed()); writeInt(c.getGreen()); writeInt(c.getBlue()); } /** * Writes an int to the output stream. */ public void writeDouble(double d) { fStream.print(d); space(); } /** * Writes an int to the output stream. */ public void writeBoolean(boolean b) { if (b) { fStream.print(1); } else { fStream.print(0); } space(); } /** * Writes a string to the output stream. Special characters * are quoted. */ public void writeString(String s) { fStream.print('"'); for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); switch (c) { case '\n': fStream.print('\\'); fStream.print('n'); break; case '"': fStream.print('\\'); fStream.print('"'); break; case '\\': fStream.print('\\'); fStream.print('\\'); break; case '\t': fStream.print('\\'); fStream.print('\t'); break; default: fStream.print(c); } } fStream.print('"'); space(); } /** * Closes a storable output stream. */ public void close() { fStream.close(); } private void writeRef(Storable storable) { int ref = getRef(storable); fStream.print("REF"); space(); fStream.print(ref); space(); } private void incrementIndent() { fIndent += 4; } private void decrementIndent() { fIndent -= 4; if (fIndent < 0) { fIndent = 0; } } private void startNewLine() { fStream.println(); for (int i = 0; i < fIndent; i++) { space(); } } private void space() { fStream.print(' '); } }
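A minimal sketch of a class serializing itself through this stream is shown below. It assumes the Storable interface exposes write(StorableOutput), which matches the write(this) call above, plus a symmetric read(StorableInput) with readInt/readString helpers that may throw IOException; the PointFigure class and its fields are invented for illustration.

import java.io.IOException;

public class PointFigure implements Storable {
    private int x;
    private int y;
    private String label;

    public void write(StorableOutput dw) {
        // primitive fields go through the typed helpers; nested Storables would use writeStorable
        dw.writeInt(x);
        dw.writeInt(y);
        dw.writeString(label);
    }

    public void read(StorableInput dr) throws IOException {
        // assumed to mirror the writer, restoring fields in the same order
        x = dr.readInt();
        y = dr.readInt();
        label = dr.readString();
    }
}

Writing an instance then amounts to constructing a StorableOutput over a file and calling writeStorable(figure), relying on the reference tracking above to preserve object identity.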
A hacker has taken over hundreds of Twitter accounts belonging to supporters of Islamic State since Sunday's attack at a gay nightclub in Orlando, using them to tweet gay pride messages and gay pornography, according to media reports including the Newsweek website. The gunman in Orlando, Omar Mateen, who killed 49 people in his rampage, swore allegiance to ISIS in a call to police in the course of the incident. "A hacker affiliated with the hacktivist collective Anonymous, who uses the online moniker WauchulaGhost, first began hijacking pro-ISIS Twitter accounts several months ago," Newsweek noted. Following Sunday's attack in Orlando, "the hacker decided to replace ISIS imagery posted to the accounts with rainbow flags and pro-LGBT messages," Newsweek reported. “I did it for the lives lost in Orlando,” the hacker, who said he is based in the United States, told Newsweek. "The vigilante hacker claims to have taken over 200 Twitter accounts belonging to ISIS supporters. However, many have since been taken down by Twitter," Newsweek said, noting that ISIS has used social media for recruitment purposes and to spread its propaganda. Original pro-ISIS content on the hacked Twitter accounts was replaced with messages such as “I’m gay and proud” and “Out and proud.” A link to a gay porn site was included in some of the hacked accounts, Newsweek reported, although no explicit images have been posted in respect to Islam. “One thing I do want to say is we aren’t using graphic porn and our purpose is not to offend Muslims,” hacker WachulaGhost said. “Our actions are directed at jihadist extremists. Many of our own [group of hackers] are Muslim and we respect all religions that do not take innocent lives.”
package github import ( "encoding/json" "errors" "fmt" "net/http" "path/filepath" "regexp" "github.com/soerenkoehler/go-simpson/util" ) const tagLatest = "latest" var uploadURLNormalizer = regexp.MustCompile(`\{\?[\w,]+\}$`) type ReleaseInfo struct { Context `json:"-"` ID int64 `json:"id"` Name string `json:"name"` UploadURL string `json:"upload_url"` } func (context Context) CreateRelease(artifacts []string) []error { var errs []error if len(context.Token) == 0 { //lint:ignore ST1005 Github is a proper noun errs = append(errs, errors.New("Github API token not found")) } else if release, err := context.getRelease(); err != nil { errs = append(errs, err) } else { for _, artifact := range artifacts { if err := release.uploadArtifact(artifact); err != nil { errs = append(errs, err) } } } return errs } func (release ReleaseInfo) uploadArtifact(path string) error { _, err := release.apiCallURL( http.MethodPost, fmt.Sprintf( "%v?name=%v", uploadURLNormalizer.ReplaceAllString(release.UploadURL, ""), filepath.Base(path)), util.BodyFromFile(path)) return err } func (context Context) getRelease() (ReleaseInfo, error) { tag := "" if version, ok := context.getPushVersion(); ok { tag = version } else if context.isPushHead() { context.setTag(tagLatest, context.Sha) tag = tagLatest } else { return ReleaseInfo{}, errors.New("pushed neither version tag nor head ref") } release, err := context.getReleaseByTag(tag) if err == nil { return release.updateRelease(tag, release.Name) } return context.createRelease(tag, tag) } func (context Context) getReleaseByTag(tag string) (ReleaseInfo, error) { response, err := context.apiCall(apiGetReleaseByTag, util.BodyReader{}, tag) if err != nil { return ReleaseInfo{}, err } return context.jsonToReleaseInfo(response) } func (release ReleaseInfo) updateRelease( tag string, name string) (ReleaseInfo, error) { _, err := release.apiCall(apiDeleteRelease, util.BodyReader{}, release.ID) if err != nil { return ReleaseInfo{}, err } return release.createRelease(tag, name) } func (context Context) createRelease( tag string, name string) (ReleaseInfo, error) { response, err := context.apiCall( apiCreateRelease, util.BodyFromMap(map[string]string{ "tag_name": tag, "name": name, })) if err != nil { return ReleaseInfo{}, err } return context.jsonToReleaseInfo(response) } func (context Context) jsonToReleaseInfo(jsonData string) (ReleaseInfo, error) { result := ReleaseInfo{ Context: context, } err := json.Unmarshal([]byte(jsonData), &result) return result, err }
package com.zipcode.justcode.clamfortress.ClamFortress.models.game.models.items.military.plating;

import com.zipcode.justcode.clamfortress.ClamFortress.models.game.models.items.military.armor.*;

public class IronPlating extends AbstractPlating {

    public IronPlating() {
        super("Iron Plating", "Standard iron plate. Used to improve armor defense bonuses by 200.");
    }

    @Override
    public Integer modifyArmorDef(AbstractArmor armor) {
        return 200;
    }

    @Override
    public IronPlating clone() {
        return new IronPlating();
    }
}
package main import ( "fmt" "git.kirsle.net/go/render" "git.kirsle.net/go/ui" ) func main() { mw, err := ui.NewMainWindow("UI Toolkit Demo") if err != nil { panic(err) } leftFrame := ui.NewFrame("Left Frame") leftFrame.Configure(ui.Config{ Width: 160, BorderSize: 1, BorderStyle: ui.BorderRaised, Background: render.Grey, }) mw.Pack(leftFrame, ui.Pack{ Side: ui.W, FillY: true, }) mainFrame := ui.NewFrame("Main Frame") mainFrame.Configure(ui.Config{ Background: render.RGBA(255, 255, 255, 180), }) mw.Pack(mainFrame, ui.Pack{ Side: ui.W, Expand: true, PadX: 10, }) label := ui.NewLabel(ui.Label{ Text: "Hello world", }) leftFrame.Pack(label, ui.Pack{ Side: ui.N, PadY: 12, }) // Draw some buttons in the left frame. for i := 1; i <= 12; i++ { i := i btn := ui.NewButton(fmt.Sprintf("Button-%d", i), ui.NewLabel(ui.Label{ Text: fmt.Sprintf("Button #%d", i), })) btn.Handle(ui.Click, func(ed ui.EventData) { fmt.Printf("Button %d was clicked\n", i) }) // Add the button to the MainWindow's event supervisor, so it may be // clicked and interacted with. mw.Add(btn) leftFrame.Pack(btn, ui.Pack{ Side: ui.N, FillX: true, PadY: 2, }) } // Frame to show off check buttons. mainFrame.Pack(radioButtonFrame(mw), ui.Pack{ Side: ui.N, FillX: true, PadY: 8, }) err = mw.MainLoop() if err != nil { panic("MainLoop:" + err.Error()) } } // Frame that shows off radio buttons. func radioButtonFrame(mw *ui.MainWindow) *ui.Frame { // The string variable that will be bound to the radio buttons. // This could also be a global variable at the package level. radioValue := "Red" // Main frame. frame := ui.NewFrame("radio button demo") frame.Configure(ui.Config{ Background: render.RGBA(153, 255, 153, 255), BorderSize: 1, BorderStyle: ui.BorderRaised, }) // Top row to show the label and current radiobutton bound value. topFrame := ui.NewFrame("radio button label frame") frame.Pack(topFrame, ui.Pack{ Side: ui.N, FillX: true, }) // Draw the labels. { label := ui.NewLabel(ui.Label{ Text: "Radio buttons. Value:", }) topFrame.Pack(label, ui.Pack{ Side: ui.W, }) valueLabel := ui.NewLabel(ui.Label{ TextVariable: &radioValue, }) topFrame.Pack(valueLabel, ui.Pack{ Side: ui.W, PadX: 4, }) } // The radio buttons themselves. btnFrame := ui.NewFrame("radio button frame") frame.Pack(btnFrame, ui.Pack{ Side: ui.N, FillX: true, }) { colors := []string{"Red", "Green", "Blue", "Yellow"} for _, color := range colors { color := color btn := ui.NewRadioButton("color:"+color, &radioValue, color, ui.NewLabel(ui.Label{ Text: color, })) mw.Add(btn) btnFrame.Pack(btn, ui.Pack{ Side: ui.W, PadX: 2, }) } } return frame }
Effect of methionine hydroxy analog feed supplements: Significant alteration and enrichment of rumen microbiota and metabolome in Hu sheep Methionine hydroxy analogs (MHA) are widely used as the main sources of methionine in ruminant feed production. The purpose of this study was to explore the effect of using MHA supplements such as MHA as a salt of calcium (MHA-Ca) and 2-hydroxy-4-(methylthio)-butanoic acid isopropyl ester (HMBi) as sources of methionine on the rumen microbiota and metabolome in Hu sheep. Seventy-two healthy Hu sheep were randomly assigned to three dietary treatment groups: control, MHA-Ca, and HMBi groups. The results showed that the concentrations of total volatile fatty acids, acetate, and propionate were higher in the HMBi group than in the control group. The HMBi and MHA-Ca groups had higher alpha diversity values than those in control group. We compared the rumen microbiota by using 16S rRNA gene sequencing. At the phylum level, the HMBi group had a higher relative abundance of Firmicutes and a lower relative abundance of Synergistetes than did the control group. At the genus level, the control group had a higher relative abundance of Treponema_2 than did the HBMi group and a higher relative abundance of Prevotellaceae_UCG_004 than did the MHA-Ca group. Metabolomic analyses revealed that fatty acids, amino acids, lipids, organic acids, sugars, amines, and nucleosides were significantly altered in both MHA-Ca and HMBi groups. Metabolites with significant differences were enriched in amino acid and carbohydrate metabolisms, such as phenylalanine metabolism, biosynthesis of amino acids, tryptophan metabolism, galactose metabolism, and tyrosine metabolism. Above all, the findings presented in this study indicate that MHA alter the rumen microbiota and metabolites and that different forms of MHA have different impacts. The results of our study contribute to a better understanding of the effects of MHA. Introduction Many studies have reported that methionine (Met) is the first or second limiting amino acid in ruminant protein synthesis because of its low concentration in dietary feed (1). Met is degraded by microorganisms after entering the rumen, which results in a lesser quantity of Met reaching the small intestine for utilization and absorption. One approach for the protection of Met from degradation in the rumen is to supply methionine hydroxy analogs (MHA) that resist microbial breakdown (2). The isopropyl ester of 2-hydroxy 4-(methylthio)-butanoic acid (HMBi) and calcium salt of the hydroxy analog of methionine (MHA-Ca) are two different forms of MHA. HMBi is produced by the esterification of 2-hydroxy-4-methylthiobutyric acid with isopropanol (3). MHA-Ca is obtained by neutralizing an MHA with calcium hydroxide or calcium oxide, which is then dried, crushed, and sieved (4). It has been determined that ∼50% of MHA is directly absorbed by the rumen wall and converted into Met and the rest is metabolized in the rumen utilized by the rumen microorganisms of adult cattle (5,6). Previous studies have shown that supplementation with HMBi results in increased milk fat content and protein yield (7,8). Extensive research has also indicated that that dietary supplementation of HMBi can improve the growth performance and the feed efficiency of finishing beef cattle by potentially changing bacterial community (6). Evidence suggests that the addition of MHA-Ca to methionine-deficient diets can significantly improve nitrogen deposition in pigs (9). 
The addition of 0.15% MHA-Ca to the diet of lactating dairy cow can effectively improve the production performance, milk quality, and feed digestion as well as utilization (10). Studies on the effects of MHA on rumen fermentation have shown conflicting results. MHA has no apparent effect on rumen fermentation (11). However, dietary supplementation with 2-hydroxy-4-(methylthio)-butanoic acid (HMB) and HMBi increased the concentrations of volatile fatty acids (VFA) and the abundance of Lactobacillus and Flavobacterium in the rumen (12). The rumen is a complex microbial ecosystem inhabited by various microorganisms such as bacteria, protozoa, fungi, and viruses (13). According to previous research, dietary changes can have a significant effect on the microbiota (14). The gut microbiota can have dramatic effects on host development and metabolism. Studies in mice suggest that gut microbiota may influence brain development and function (15). Changes in metabolite production may also affect the fatty acid metabolism of hosts, thereby altering their lifespan (16). MHA is partially degraded in the rumen and can serve as a source of Met for rumen microorganisms. Whether MHA can affect animal growth and development by altering rumen microbiota and metabolism remains unknown. This lack of knowledge has resulted in an inability to properly evaluate the effects of MHA on ruminant feed production. Hu sheep are native to China and are excellent for both wool and meat (17). Previous research have mainly concentrate on the effects of MHA on the performance of high-production dairy cows (18,19). Few studies have investigated the effect of MHA on rumen microbiota and metabolome in Hu sheep. Consequently, the main purpose of this research was to describe the effects of consuming feed with different MHA on the rumen microbiota and metabolome in Hu sheep. In addition, the relationship between the rumen microbiota and metabolome was also determined. Materials and methods Animals, diets, and experimental design Seventy-two healthy Hu sheep comprised half male and half female (60-70 days old and weighing 19.99 ± 2.04 kg) were randomly divided into three dietary treatment groups: control (CON), MHA-Ca, and HMBi groups. Each group consisted of 24 Hu sheep, half male and half female, divided into 6 replicates (each replicate consisted of 4 sheep of the same sex), and each replicate was housed in individual pen. The sheep house was a semi-closed sheep house. The experimental period was autumn in China. The temperature range was 5-25 • C, the relative humidity was 40-80%, and the light was natural light. All the sheep were fed twice a day at 06:00 h and 19:00 h, and water was available over the course of the experiment. All animals were fed a pelleted total mixed ration diet. The diet was formulated in conformity with the feeding standards of meat-producing sheep (NY/T816-2004, China). The nutrient composition of the basic diet was essentially the same among the three treatments except for the supplementation of MHA. The chemical compositions and ingredients of the diets are listed in Table 1. The CON group was fed the same basal diet with no rumen-protected Met, whereas diets of the MHA-Ca and HMBi groups were additionally supplemented with 1.5 kg/t MHA-Ca and 1.5 kg/t HMBi. There were 80 days in the experimental period, with the first 7 days being used for preadaptation. Sample collection and measurements All Hu sheep were kept food-deprived for 24 h and water-deprived for 2 h. 
Then, one sheep from each replicate was randomly selected for slaughter (i.e., a total of six sheep from each group). We collected the ruminal fluid samples immediately after slaughter and strained them through cheesecloth. Approximately 20 mL of ruminal fluid was stored in liquid nitrogen for the determination of fermentation parameters, extraction of microbial DNA, and metabolome measurements. The fermentation parameters were analyzed . /fvets. . Microbial DNA extraction and sequencing Using the DNA extraction kit (QIAGEN, D Neasy Power Soil Kit 10) to extract the total genomic DNA of the ruminal fluid sample. The purity and concentration of the DNA was verified with Nano Drop and agarose gel. Taking an appropriate amount of the sample into a centrifuge tube, dilute the sample with sterile water to 1 ng/µ L. Using the diluted genomic DNA as the template, according to the selection of the sequencing region, using the specific primers with Barcode, Takara Ex Taq high-fidelity enzyme from Takara Company for PCR to ensure the amplification efficiency and accuracy. For bacterial diversity analysis, V3-V4 (or V4-V5) variable regions of 16S rRNA genes was amplified with primer pairs 343F (5'-TACGGRAGGCAGCAG−3') and 798R (5'-AGGGTATCTAATCCT-3'). Rumen microbiota bioinformatic analyses Raw data is in FASTQ format. The original paired-end sequences were descrambled using Trimmomatic version 0.35 software (21). Detected and truncated the ambiguous base N and used the sliding window method to check the average base quality (21). When the quality was lower than 20, the preceding high-quality sequence was truncated. The paired-end sequence after decontamination was performed using FLASH version 1.2.11 software (22). The splicing parameters were as follows: the smallest overlap was 10 bp, the largest overlap was 200 bp, and the largest mismatch rate was 20%. To ensure the accuracy of the results, precise impurity removal could be performed to remove sequences containing ambiguous bases, homologous single bases, and sequences that were too short in length. The parameters for precise impurity removal were: remove sequences containing N bases, and retain sequences with a base quality score Q20 of at least 75% (23). After the sequencing data was preprocessed to generate high-quality sequences, the Vsearch version 2.4.2 software was used to classify the sequences into multiple OTUs according to the similarity of the sequences (24). The parameter was sequence similarity greater than or equal to 97% to be classified as an OTU unit (25). The representative sequences of each OTU were selected using the QIIME version 1.8.0 software (http:// qiime.org/scripts/index.html) and all representative sequences were aligned and annotated with the database (26). 16S was aligned with Green genes or Silva version123 database, species alignment was annotated with RDP classifier software, and annotation results with confidence intervals >0.7 were retained (24). The relative abundance of the phylogenetic investigation of communities by reconstruction of unobserved states PICRUStpredicted metabolic pathways of ruminal bacterial microbiome in three groups. The extended error bar plot was generated using STAMP software (http://kiwi.cs.dal.ca/Software/). Metabolomic measurement Eighty microliter of sample was transfered into 1.5 ml EP tubes. Then 10 µL of internal standard (L-2chlorophenylalanine, 0.3 mg/mL in methanol) was added and vortexed for 10 s. 
Two hundred forty microliter of methanolacetonitrile (2:1) mixed solution were added and vortexed for 1 min. Ultrasonic extraction in ice water bathed for 5 min and were stand at−20 • C for 10 min. The sample was centrifuged for 10 min (12,000 rpm, 4 • C) and transfered 150 µL of the supernatant into a glass derivatization bottle. Quality control samples (QCs) were prepared by mixing equal volumes of extracts from all samples and each QC had the same volume as the sample. Eighty microliter of methoxyamine hydrochloride in pyridine (15 mg/mL) was added to a glass derivatized vial, vortexed for 2 min and performed oximation in a shaking incubator at 37 • C for 90 min. After the sample was taken out, 80 µL of BSTFA (containing 1% TMCS) derivatization reagent and 20 µL of n-hexane were added. The sample was left at room temperature for 30 min for GC-MS metabolomic analysis. The samples were tested on an Agilent 7890B gas chromatography system and an Agilent 5977A MSD system (Agilent Technologies Inc., CA, USA). The derivatives were . /fvets. . separated to utilized the DB-5MS fused-silica capillary column (30 m × 0.25 mm × 0.25 µm, Agilent J & W Scientific, Folsom, CA, USA). The injection volume was 1 µL and the injection was splitless with a solvent delay of 5 min. The temperature of the injection port was 260 • C.The initial temperature of the column oven was 60 • C, the temperature was programmed to 125 • C at 8 • C/min, and 5 • C/min was heated to 210 • C; 10 • C/min was heated to 270 • C, 20 • C/min to 305 • C and held for 5 min. Rumen microbiota data preprocessing and statistical analysis The data matrix was imported into SIMCA software (version 14.0, Umetrics, Umea, Sweden) and autonomous principal component analysis (PCA) was used to determine the overall distribution among samples and the process of general analysis. The partial least squares analysis (PLS-DA) and orthogonal partial least squares analysis (OPLS-DA) were used to distinguish the overall differences in metabolic profiles between groups and to find differential metabolites between groups. The combination of multidimensional analysis and single-dimensional analysis were used to compare the differential metabolites between groups. In the OPLS-DA analysis, the variable weight value (Variable important in projection, VIP) was used to measure the impact strength and explanatory power of the expression pattern of each metabolite on the classification and discrimination of each group of samples. Metabolites with VIP>1 were considered differential metabolites. Further t-test (student's t-test) was used to verify whether the metabolite differences between groups were significant. Data analyses The data on ruminal fermentation parameters were analyzed using a one-way analysis of variance (SPSS v. 25.0, SPSS Inc., Chicago, IL, USA). The results were expressed as mean ± standard deviation, and p < 0.05 was considered a significant difference. Principal coordinate analysis was used to detect differences between the microbial communities from different experimental groups. Wilcoxon-Mann-Whitney U-test was used to identify phylum and genus-level differences in microbes. Metabolites with variable influence on projection (VIP) values larger than 1.0 and p-values from a two-tailed Student's t-test <0.05 were considered differential metabolites. The correlation matrix between bacterial families and altered metabolite levels was generated using Spearman's correlation coefficients and visualized using R language. 
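To make the correlation step described above concrete, the sketch below computes Spearman's rank correlation between one bacterial genus' relative abundance and one metabolite's level across samples. It is a generic textbook implementation with invented example numbers, not the authors' actual R/SPSS pipeline; ties are handled with average ranks.

import java.util.Arrays;

public class SpearmanSketch {

    // assign 1-based ranks, averaging the rank over tied values
    static double[] ranks(double[] v) {
        int n = v.length;
        Integer[] order = new Integer[n];
        for (int i = 0; i < n; i++) order[i] = i;
        Arrays.sort(order, (a, b) -> Double.compare(v[a], v[b]));
        double[] r = new double[n];
        int i = 0;
        while (i < n) {
            int j = i;
            while (j + 1 < n && v[order[j + 1]] == v[order[i]]) j++;
            double avgRank = (i + j) / 2.0 + 1.0;
            for (int k = i; k <= j; k++) r[order[k]] = avgRank;
            i = j + 1;
        }
        return r;
    }

    // Spearman's rho is the Pearson correlation computed on the ranks
    static double spearman(double[] x, double[] y) {
        double[] rx = ranks(x), ry = ranks(y);
        double mx = Arrays.stream(rx).average().orElse(0);
        double my = Arrays.stream(ry).average().orElse(0);
        double cov = 0, vx = 0, vy = 0;
        for (int i = 0; i < x.length; i++) {
            cov += (rx[i] - mx) * (ry[i] - my);
            vx += (rx[i] - mx) * (rx[i] - mx);
            vy += (ry[i] - my) * (ry[i] - my);
        }
        return cov / Math.sqrt(vx * vy);
    }

    public static void main(String[] args) {
        // invented example: abundance of one genus and one metabolite across six rumen samples
        double[] genus      = {0.12, 0.08, 0.15, 0.20, 0.05, 0.11};
        double[] metabolite = {3.1, 2.4, 3.8, 4.9, 1.7, 2.9};
        System.out.printf("Spearman rho = %.3f%n", spearman(genus, metabolite));
    }
}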
Ruminal fermentation parameters The HMBi group had higher concentrations of total volatile fatty acid, acetate, and propionate (p = 0.03, p = 0.02, and p = 0.03, respectively), than the CON group. However, there was no significant difference in pH, butyrate, isovalerate, and valerate concentrations between the three groups (Table 2). Richness, diversity, and composition of the ruminal bacterial communities The number of observed species was higher (p = 0.015) in the HMBi group than in the CON group. The Chao 1 value was higher (p < 0.01) in both MHA-Ca and HMBi groups than that in the CON group (Table 3). There was no significant difference in the Shannon-Wiener and Simpson indices among the three groups. According to the principal coordinate analysis profile (Figure 1), the CON, MHA-Ca, and HMBi groups were detached (axis 1 + axis 2 + axis 3 = 55.2%). In total, we identified 18 bacterial phyla in the ruminal fluid samples. Abundant taxa (relative abundance ≥ 0.01%) are presented in Table 4. The analyzed DNA sequences typically belonged to the phyla Bacteroidetes, Firmicutes, and Proteobacteria. We found that the HMBi group had a significantly higher (p = 0.04) relative abundance of Firmicutes than the CON group did. In contrast, the relative abundance of CON (Control group) = sheep fed total mixed ration pellet diet with a basal diet. MHA-Ca (MHA-Ca group) = sheep fed total mixed ration pellet diet additionally supplemented with MHA-Ca. HMBi (HMBi group) = sheep fed total mixed ration pellet diet additionally supplemented. a,b The same letters indicate no significant difference in functional abundance between the two groups; different letters indicate significant differences in functional abundance between the two groups. Synergistetes was significantly higher in the CON group than that in the HMBi group. No significance was found in the abundance of Bacteroidetes, Spirochaetae, Tenericutes and other bacterial phyla between the three groups. In total, we identified 190 taxa in the ruminal fluid samples. The abundant taxa (relative abundance ≥ 0.01%) are presented for clarity and visualization in Table 5. The relative abundance of Ambiguous taxa was higher (p = 0.01) in both the MHA-Ca and HMBi groups than in the CON group. In the CON group, the relative abundance of Prevotellaceae_UCG_004 was higher (p = 0.04) than that in the MHA-Ca group, whereas the relative abundance of Ruminococcaceae_UCG_014 taxa was lower (p = 0.03) than that in the HBMi group. In addition, a significant increase (p = 0.04) in the relative abundance of Treponema_2 was observed in the CON group compared with that in the HBMi group. The other genera showed no significant difference between the CON, MHA-Ca, and HMBi groups. Predicted functions of ruminal bacterial microbiota This study inferred that 18 gene families in the ruminal microbiota showed significantly different abundances between the CON and the MHA-Ca groups (Figure 2A), and 25 gene families in the ruminal microbiota showed significantly different abundances between the CON and HMBi groups ( Figure 2B). Furthermore, compared with the CON group, the gene families involved in base excision repair, aminoacy-tRNA biosynthesis, and amino acid-related enzymes were significantly decreased, Frontiers in Veterinary Science frontiersin.org . /fvets. . The same letters indicate no significant difference in functional abundance between the two groups; different letters indicate significant differences in functional abundance between the two groups. 
The same letters indicate no significant difference in functional abundance between the two groups; different letters indicate significant differences in functional abundance between the two groups. Frontiers in Veterinary Science frontiersin.org . /fvets. . The extended error bar plot was generated using STAMP software. Welch's two-sided test was used, and Welch's inverted test was . . whereas that for thiamine metabolism was significantly increased in both the MHA-Ca and HMBi groups. GC/MS analysis of the ruminal fluid To explore whether changes in rumen microbes could lead to alterations in rumen metabolites, GC/MS-based metabolome profiling was used to characterize rumen metabolism. A total of 238 metabolites were found in the rumen samples, including amino acids, organic acids, lipids, sugars, amines, and nucleosides. Orthogonal partial least squares discriminant analysis was performed on the metabolites in the CON, MHA-Ca, and HMBi groups. The score plot showed a clear separation between the CON and MHA-Ca groups ( Figure 3A) and between the CON and HMBi groups ( Figure 3B). This indicates the differences in metabolites in the rumen among the three groups. Di erences in ruminal metabolites between CON, MHA-Ca, and HMBi groups In addition, we compared these three sets of data to detect differences in metabolites between the three groups. In total, 62 differential metabolites were detected between MHA-Ca and CON, and 84 differential metabolites were observed between the HMBi and CON groups. Based on the statistical analysis and the VIP value obtained from the partial least squares discriminant analysis (false discovery rate < 0.05, and VIP > 1), 9 differential metabolites were identified in MHA-Ca and CON groups, 24 differential metabolites were identified in the HMBi and CON groups. These metabolites were shown in Tables 6, 7, respectively. Generally, the ruminal metabolites differed mainly in fatty acids, amino acids, organic acids, amines, and nucleosides among the three groups. Metabolic pathways of di erential metabolites Pathway analysis was conducted to supply a inclusive view of the different metabolites between the Hu sheep in the CON, MHA-Ca, and HMBi groups. Results revealed that pentose and glucuronate interconversions, 2-oxocarboxylic acid metabolism, citrate cycle, biosynthesis of amino acids, glucagon signaling pathway, carbon metabolism, phenylalanine, tyrosine, and tryptophan biosynthesis, central carbon metabolism in cancer, phenylalanine metabolism, and tryptophan metabolism were the top 10 pathways which were significantly enriched in MHA-Ca group compared with CON group ( Figure 4A). Results of the comparison between the HMBi and CON groups showed that alanine, aspartate, and glutamate metabolism, Parkinson's disease, retrograde endocannabinoid signaling, galactose metabolism, tyrosine metabolism, cAMP signaling pathway, pentose and glucuronate interconversions, morphine addiction, nicotinate and nicotinamide metabolism, and alcoholism were the top 10 pathways which were significantly enriched in HMBi group compared with CON group ( Figure 4B). Correlation analyses between the ruminal metabolome and microbiome Correlation analyses were performed using Spearman's correlation coefficients obtained for the microorganisms and differential metabolites in the CON, MHA-Ca, and HMBi Frontiers in Veterinary Science frontiersin.org . /fvets. . groups to investigate potential host-microbiota metabolic interactions. 
The results of these analyses indicate a complex connection between rumen microbiota and metabolites. For example, the Spearman's correlation constructed using data from the CON and MHA-Ca groups showed that Oscillibacter, Ambiguous taxa, and _coprostanoligenes_group were negatively correlated with most metabolites (Figure 5A), and in the CON and HMBi groups, Ruminococcaceae_UCG_014 was negatively associated with eight metabolites (including proline, sorbitol, 4-aminobutyric acid, and kynurenine) ( Figure 5B). Discussion For Hu sheep, the rumen plays an important role in digestion, metabolism, and health (27). VFA, such as acetate, butyrate, and propionate, are the end products of rumen microbial fermentation and are considered one of the rumen fermentation indexes (28). Dietary or feed additives can affect ruminal fermentation patterns. Changes in rumen VFA concentrations may reflect changes in rumen fermentation patterns (29, 30). In our study, the results showed that MHA can significantly enhance the rumen fermentation characteristics of Hu sheep, and the concentrations of acetate, butyrate, and total VFA were higher in both the MHA-Ca and HMBi groups than in the CON group. These results are consistent with those of Zhou (31), who also found that HMBi can increase the concentrations of acetate, butyrate, and total VFA in vitro. Previous studies have reported that MHA can promote rumen fermentation, acetate concentration was significantly decreased by HMBi deduction (6,32). A possible explanation for this might be that MHA could use free ammonia nitrogen in the rumen to synthesize Met, thereby increasing the amount of Met in the rumen. Met promotes the establishment of microflora and improves the digestibility of nitrogenous compounds and carbohydrates in the rumen. As an indicator of the ruminal microbiota physiological state, the microbiota's richness and diversity are important. Ruminal bacterial microbiota can be influenced by dietary feed (33). In the present study, we investigate the impact of MHA feeding on the composition and diversity of the rumen bacterial community. At the phylum level, Firmicutes and Bacteroidetes were the two major phyla in all three groups. At the genus level, Prevotella_1, uncultured rumen bacterium, Rikenellaceae_RC9_gut_group, and Ambiguous taxa were highly abundant. In addition, The HMBi and MHA-Ca groups had higher alpha diversity values than those in control group, which showed that the microbial community composition was changed and tended to be more diverse in Hu sheep in both the HMBi and MHA-Ca groups. These results are consistent with those of previous research (34,35), and previous studies have found that Met deficiency hinders the growth and reproduction of rumen bacteria and protozoa (36). MHA promotes rumen bacterial growth (37), and MHA and HMBi supplementation increase VFA concentrations in the rumen and the ruminal abundance of Fibrobacter succinogenes and Ruminococcus flavefaciens (12). This result can be explained by the fact that MHA can be used by ruminal bacterial microbiota to promote the proliferation of rumen microorganisms. Another important finding was that the MHA-Ca and HMBi groups had a higher relative abundance of Firmicutes than did the CON group. Firmicutes are a core bacterial component of the rumen (38). It contains many fiberdecomposing bacteria, including Ruminococcus, Butyvibrio, and Eubacterium. The main function of Firmicutes is to degrade fiber and cellulose (39, 40). 
Previous research have revealed that dandelions (as potential functional feed additives) could improve the abundance of rumen Firmicutes bacteria, and a higher abundance of Firmicutes could enhance rumen fermentation (41). When the supply of HMBi was reduced, the ruminal microbiota was inhibited, which led to a decrease in the Shannon index and the relative abundance of Firmicutes (32). This is consistent with our study in which the concentrations of total VFA, acetate, and propionate were higher in both the MHA-Ca and HMBi groups than in the CON group. A possible explanation for this might be that MHA-Ca and HMBi can be metabolized to MHA in the rumen and thus enhance rumen fermentation by increasing the abundance of rumen Firmicutes bacteria. Metabolomics explains phenotypic changes better than genomics or proteomics (42). To better understand the effects of MHA on rumen microorganisms, we conducted a metabolomic study. Our data showed that the concentrations of many rumen metabolites changed in both the MHA-Ca and HMBi groups, which might be associated with changes in rumen microbial abundance. Compared with the CON group, several organic acids, such as hydrocinnamic, quinic, chlorogenic, 4hydroxycinnamic, and pipecolic acids, were less abundant in both the MHA-Ca and HMBi groups. In contrast, levels of 4-hydroxycinnamic, 3-hydroxybutyric, and 2-hydroxybutanoic acids were higher in the HMBi group than in the CON 45). Chlorogenic acid is a polyphenol with a strong bacteriostatic activity (46,47). Kynurenine, arabitol, sorbitol, and N-acetylornithine were also identified. In addition, correlation analysis revealed that arabitol and sorbitol had a high positive correlation with Pyramidobacter and Succinatimonas. N-acetylornithine and kynurenine were negatively correlated with Ambiguous taxa and Ruminococcaceae_UCG_014. Using the Kyoto encyclopedia of genes and genomes analysis, we found that the metabolites with significant differences were mainly enriched in amino acid and carbohydrate metabolisms, such as phenylalanine metabolism, tryptophan metabolism, alanine, aspartate, and glutamate metabolism, biosynthesis of amino acids, galactose metabolism, and tyrosine metabolism. This is consistent with earlier observations, which showed that the contents of microbial amino acids such as phenylalanine, methionine, isoleucine, and leucine in the rumen liquid and solid phases were significantly increased when fed coated Met and adsorbed MHA (48). Previous research have identified that the metabolism of the rumen microbiota is affected by substrate amino acids (49). Met was used as a nitrogen and carbon source to study its effect on amino acid metabolism of rumen microorganisms, and it was found that adding Met can improve the utilization of valine, serine, cysteine, and histidine, change the metabolism of histidine, and increase the content of the product (50). These results indicate that MHA has a significant effect on amino acid and carbohydrate metabolisms. Conclusion In summary, the present study was designed to determine the effect feeding an MHA to Hu sheep had on the rumen microbiota and metabolome. These findings indicate that ruminal fermentation was stimulated, as evidenced by increased concentrations of total VFA, acetate, and propionate. The composition of the bacterial community, such as Firmicutes and Synergistetes, was altered, and the richness and diversity of the ruminal microbiota were significantly enhanced with MHA supplementation. 
Metabolomic analysis revealed that some ruminal metabolites were significantly altered, including amino acids, organic acids, and amines. Moreover, correlation analysis of the metabolome and microbiome showed some associations between the microbial groups and metabolites. In general, this study contributes to our understanding of the use of MHA as a feed supplement in Hu sheep.

Data availability statement

The datasets presented in this study can be found in online repositories. The names of the repository/repositories and accession number(s) can be found at: https://www.ncbi.nlm.nih.gov/, PRJNA833900.

Ethics statement

The animal study was reviewed and approved by the Administration of Affairs Concerning Experimental Animals
// ClearHolidays removes all previously loaded holidays. func (so *Calendar) ClearHolidays() { so.Lock() defer so.Unlock() so.holidays = nil }
/**
 * Create an action button.
 *
 * @param contentActionId The content action id.
 * @param stringId1 The id of the string to be displayed on the first line of text.
 * @param stringId2 The id of the string to be displayed on the second line of text.
 * @param iconResourceId The resource id of the icon to display, or null if the action has no icon.
 * @return The action.
 */
private Action createActionButton(int contentActionId, int stringId1, int stringId2,
                                  Integer iconResourceId) {

    Action action = new Action().setId(contentActionId)
                                .setLabel1(mAppContext.getResources().getString(stringId1))
                                .setLabel2(mAppContext.getResources().getString(stringId2));
    if (iconResourceId != null) {
        action.setIcon(mAppContext.getResources().getDrawable(iconResourceId));
    }
    return action;
}
""" my_list = list(range(0,10)) my_list.append(10) print("Step 1:(append)", my_list) my_list.insert(2, 10) print("step 2:(insert)", my_list) my_list.pop() print("step 3:(pop)", my_list) my_list.remove(10) print("step 4:(pop)", my_list) """ # Queue follows FIFO (First in, First Out) # Stack follows LIFO (Last in, First Out) from collections import deque linked_list = deque() linked_list.append(1) linked_list.append(2) linked_list.append(3) # print(linked_list) queue = deque() for i in range(0,5): queue.append(i) # adding an element at the beginning of the queue #print(queue) for i in range(len(queue)): # removing the first element from queue queue.popleft() #print(queue) stack = deque() for i in range(0,5): # adding an element at the beginning of the stack stack.appendleft(i) #print(stack) for i in range(len(stack)): #removing the first element from stack stack.popleft() #print(stack) # custom linked list (for interviews lol) class Node: def __init__(self, data): self.data = data self.next = None class Linked: def __init__(self): self.head = None def show(self): node = self.head while node is not None: print(node.data) node = node.next
/** * Uses the BankAccount compareTo method to sort ascending by account number * Use ANY of the 3 quadratic sorts */ public void sortAscending() { for (int i = 1; i <= accounts.size(); i++) { for (int k = 0; k < accounts.size()-i; k++) { if (accounts.get(k).compareTo(accounts.get(k+1)) > 0) { BankAccount temp = accounts.get(k); accounts.set(k,accounts.get(k+1)); accounts.set(k+1, temp); } } } }
from deceased import Deceased
from obituary import Obituary
from grave import Grave

# obituary = Obituary("victor", 30, "SRAG, IRA")


class Undertaker:
    def __init__(self):
        # Sequential id used when digging a new grave.
        self.grave_id = 0

    def make_grave(self):
        self.grave_id += 1
        # grave = Grave(self.grave_id, "N", 15, 2, "brickwork", "Empty", "XG")
        # return grave
        # print(grave.type)

    def bury(self, deceased):
        deceased.buried = True  # mark the deceased as buried
        return deceased.buried

    def exhume_checkout_cemitery(self, deceased):
        deceased.buried = False  # the deceased leaves the cemetery
        return deceased.buried

    def exhume_to_numbered_ossuary(self):
        pass

    def exhume_to_general_ossuary(self):
        pass

    def judicial_exhume(self):
        pass

    def administrative_exhume(self):
        pass
<filename>Applications/InCallService/PHVideoCallViewController.h // // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>. // #import "PHCallViewController.h" #import "AVCEffectsDelegate.h" #import "CFXCameraViewControllerDelegate.h" #import "CFXEffectBrowserContentPresenterDelegate.h" #import "CNKBuzzedTransitioningProtocol.h" #import "CNKFaceTimeCameraViewControllerDelegate.h" #import "CNKFaceTimeInCallControlsDragControllerDelegate.h" #import "CNKFaceTimeInCallControlsViewControllerDelegate.h" #import "MFMessageComposeViewControllerDelegate.h" #import "PHAudioRoutingAlertDataSource.h" #import "PHAudioRoutingAlertDelegate.h" #import "PHCallParticipantsViewDelegate.h" #import "PHVideoCallVideoGroupViewDelegate.h" #import "PHVideoOverlayViewControllerDelegate.h" #import "TUMomentsControllerDelegate.h" #import "TURouteControllerDelegate.h" #import "UIPopoverPresentationControllerDelegate.h" @class AVCEffects, CFXEffectBrowserViewController, CNKEffectsLayoutController, CNKFaceTimeCameraViewController, CUShutterButton, ICSFlashView, ICSInsulatingView, ICSPIPButton, NSArray, NSLayoutConstraint, NSString, NSTimer, PHAudioRoutingAlertController, PHCallParticipantsView, PHStatusBarGradientView, PHSublayerResizingView, PHVideoCallInterfaceOverlayView, PHVideoCallVideoGroupView, PHVideoOverlayViewController, TUMomentsController, UIColor, UIControl, UILabel, UIView, _UIBackdropView; @interface PHVideoCallViewController : PHCallViewController <PHAudioRoutingAlertDataSource, PHAudioRoutingAlertDelegate, UIPopoverPresentationControllerDelegate, CFXCameraViewControllerDelegate, CFXEffectBrowserContentPresenterDelegate, CNKFaceTimeCameraViewControllerDelegate, PHVideoCallVideoGroupViewDelegate, PHCallParticipantsViewDelegate, MFMessageComposeViewControllerDelegate, TUMomentsControllerDelegate, PHVideoOverlayViewControllerDelegate, TURouteControllerDelegate, AVCEffectsDelegate, CNKFaceTimeInCallControlsViewControllerDelegate, CNKBuzzedTransitioningProtocol, CNKFaceTimeInCallControlsDragControllerDelegate> { UIControl *_videoGroupView; // 8 = 0x8 UIView *_pipVideoGroupView; // 16 = 0x10 PHVideoCallVideoGroupView *_pipVideoContainer; // 24 = 0x18 UIView *_pipVideoDimmingView; // 32 = 0x20 UIView *_pipVideoView; // 40 = 0x28 UIView *_pipVideoBackView; // 48 = 0x30 UIView *_pipVideoFrameView; // 56 = 0x38 UIView *_fullScreenVideoGroupView; // 64 = 0x40 UIControl *_fullScreenVideoContainer; // 72 = 0x48 PHSublayerResizingView *_fullScreenVideoView; // 80 = 0x50 PHSublayerResizingView *_fullScreenVideoBackView; // 88 = 0x58 UIView *_pipVideoOverlayView; // 96 = 0x60 _UIBackdropView *_fullScreenVideoBackdropView; // 104 = 0x68 UIView *_pipVideoMutedView; // 112 = 0x70 UILabel *_statsHUDTextLabel; // 120 = 0x78 NSTimer *_statsHUDTimer; // 128 = 0x80 long long _lastKnownOrientation; // 136 = 0x88 PHVideoCallInterfaceOverlayView *_callBarOverlayView; // 144 = 0x90 CNKFaceTimeCameraViewController *_effectsViewController; // 152 = 0x98 NSTimer *_autodismissRouteListTimer; // 160 = 0xa0 _Bool _waitingForLocalVideoFirstFrame; // 168 = 0xa8 _Bool _useLargeTextAndIcons; // 169 = 0xa9 _Bool _isShowingTopBar; // 170 = 0xaa _Bool _isShowingBottomBar; // 171 = 0xab _Bool _isShowingControlsBar; // 172 = 0xac _Bool _isShowingFullConferenceDisplay; // 173 = 0xad _Bool _isShowFullConferenceDisplayAnimationComplete; // 174 = 0xae _Bool _isShowingCallWaitingUI; // 175 = 0xaf _Bool _isShowingLockScreenUI; // 176 = 0xb0 _Bool _isShowingExpandedLocalVideo; // 177 = 0xb1 
_Bool _isShowingEffectsBrowser; // 178 = 0xb2 _Bool _effectsEnabled; // 179 = 0xb3 _Bool _isShowingExpandedControls; // 180 = 0xb4 _Bool _shouldShowSensitiveUI; // 181 = 0xb5 _Bool _willTransitionToMultiwayViewController; // 182 = 0xb6 _Bool _willTransitionToStagingViewController; // 183 = 0xb7 _Bool _isRegisteredForPresentedNotifications; // 184 = 0xb8 _Bool _isPresented; // 185 = 0xb9 _Bool _cameraUnavailable; // 186 = 0xba _Bool _wantsVideo; // 187 = 0xbb unsigned long long _rotationType; // 192 = 0xc0 PHCallParticipantsView *_callParticipantsView; // 200 = 0xc8 PHVideoOverlayViewController *_overlayViewController; // 208 = 0xd0 ICSInsulatingView *_momentsIndicatorInsulatingView; // 216 = 0xd8 ICSPIPButton *_flipButton; // 224 = 0xe0 PHAudioRoutingAlertController *_routeListController; // 232 = 0xe8 long long _lockedLocalOrientationForPIP; // 240 = 0xf0 long long _lastValidOrientationForRemoteCamera; // 248 = 0xf8 CDUnknownBlockType _deviceOrientationBlock; // 256 = 0x100 NSTimer *_backupRotationTimer; // 264 = 0x108 PHStatusBarGradientView *_fullScreenVideoStatusBarGradientView; // 272 = 0x110 CFXEffectBrowserViewController *_effectsBrowserViewController; // 280 = 0x118 CNKEffectsLayoutController *_effectsLayoutController; // 288 = 0x120 NSString *_contactName; // 296 = 0x128 PHSublayerResizingView *_fullScreenVideoScreenView; // 304 = 0x130 ICSFlashView *_flashView; // 312 = 0x138 ICSFlashView *_pipVideoFlashView; // 320 = 0x140 CUShutterButton *_shutterButton; // 328 = 0x148 long long _shutterButtonStyle; // 336 = 0x150 NSLayoutConstraint *_shutterButtonLeftConstraint; // 344 = 0x158 NSLayoutConstraint *_shutterButtonRightConstraint; // 352 = 0x160 AVCEffects *_avcEffects; // 360 = 0x168 CDUnknownBlockType _localCameraFlipCompletion; // 368 = 0x170 NSString *_displayLabelString; // 376 = 0x178 UIColor *_startingWindowBackgroundColor; // 384 = 0x180 CDUnknownBlockType _pipStateDidChangeToNonTransitoryCompletionBlock; // 392 = 0x188 TUMomentsController *_momentsController; // 400 = 0x190 struct CGRect _unexpandedPipFrame; // 408 = 0x198 } + (_Bool)isDockState:(long long)arg1; // IMP=0x00000001000bf3c8 + (_Bool)isVisibleState:(long long)arg1; // IMP=0x00000001000bf3b8 + (_Bool)isExpandedState:(long long)arg1; // IMP=0x00000001000bf3a8 + (long long)stateWithoutDockForState:(long long)arg1; // IMP=0x00000001000bf32c + (id)dismissalAssertionReason; // IMP=0x00000001000accb4 + (_Bool)_shouldForwardViewWillTransitionToSize; // IMP=0x000000010007e898 @property(retain, nonatomic) TUMomentsController *momentsController; // @synthesize momentsController=_momentsController; @property(copy) CDUnknownBlockType pipStateDidChangeToNonTransitoryCompletionBlock; // @synthesize pipStateDidChangeToNonTransitoryCompletionBlock=_pipStateDidChangeToNonTransitoryCompletionBlock; @property(retain, nonatomic) UIColor *startingWindowBackgroundColor; // @synthesize startingWindowBackgroundColor=_startingWindowBackgroundColor; @property(nonatomic) _Bool wantsVideo; // @synthesize wantsVideo=_wantsVideo; @property _Bool cameraUnavailable; // @synthesize cameraUnavailable=_cameraUnavailable; @property _Bool isPresented; // @synthesize isPresented=_isPresented; @property(retain) NSString *displayLabelString; // @synthesize displayLabelString=_displayLabelString; @property _Bool isRegisteredForPresentedNotifications; // @synthesize isRegisteredForPresentedNotifications=_isRegisteredForPresentedNotifications; @property(retain, nonatomic) PHVideoCallInterfaceOverlayView *callBarOverlayView; // @synthesize 
callBarOverlayView=_callBarOverlayView; @property(copy, nonatomic) CDUnknownBlockType localCameraFlipCompletion; // @synthesize localCameraFlipCompletion=_localCameraFlipCompletion; @property(retain, nonatomic) AVCEffects *avcEffects; // @synthesize avcEffects=_avcEffects; @property(retain, nonatomic) UIView *pipVideoMutedView; // @synthesize pipVideoMutedView=_pipVideoMutedView; @property(retain, nonatomic) UIView *pipVideoOverlayView; // @synthesize pipVideoOverlayView=_pipVideoOverlayView; @property(nonatomic) struct CGRect unexpandedPipFrame; // @synthesize unexpandedPipFrame=_unexpandedPipFrame; @property(retain, nonatomic) NSLayoutConstraint *shutterButtonRightConstraint; // @synthesize shutterButtonRightConstraint=_shutterButtonRightConstraint; @property(retain, nonatomic) NSLayoutConstraint *shutterButtonLeftConstraint; // @synthesize shutterButtonLeftConstraint=_shutterButtonLeftConstraint; @property(nonatomic) long long shutterButtonStyle; // @synthesize shutterButtonStyle=_shutterButtonStyle; @property(retain, nonatomic) CUShutterButton *shutterButton; // @synthesize shutterButton=_shutterButton; @property(retain, nonatomic) ICSFlashView *pipVideoFlashView; // @synthesize pipVideoFlashView=_pipVideoFlashView; @property(retain, nonatomic) ICSFlashView *flashView; // @synthesize flashView=_flashView; @property(retain, nonatomic) PHSublayerResizingView *fullScreenVideoScreenView; // @synthesize fullScreenVideoScreenView=_fullScreenVideoScreenView; @property(retain, nonatomic) PHSublayerResizingView *fullScreenVideoBackView; // @synthesize fullScreenVideoBackView=_fullScreenVideoBackView; @property(retain, nonatomic) PHSublayerResizingView *fullScreenVideoView; // @synthesize fullScreenVideoView=_fullScreenVideoView; @property(retain, nonatomic) UIControl *fullScreenVideoContainer; // @synthesize fullScreenVideoContainer=_fullScreenVideoContainer; @property(retain, nonatomic) UIView *fullScreenVideoGroupView; // @synthesize fullScreenVideoGroupView=_fullScreenVideoGroupView; @property(retain, nonatomic) UIView *pipVideoFrameView; // @synthesize pipVideoFrameView=_pipVideoFrameView; @property(retain, nonatomic) UIView *pipVideoBackView; // @synthesize pipVideoBackView=_pipVideoBackView; @property(retain, nonatomic) UIView *pipVideoView; // @synthesize pipVideoView=_pipVideoView; @property(retain, nonatomic) UIView *pipVideoDimmingView; // @synthesize pipVideoDimmingView=_pipVideoDimmingView; @property(retain, nonatomic) PHVideoCallVideoGroupView *pipVideoContainer; // @synthesize pipVideoContainer=_pipVideoContainer; @property(retain, nonatomic) UIView *pipVideoGroupView; // @synthesize pipVideoGroupView=_pipVideoGroupView; @property(retain, nonatomic) UIControl *videoGroupView; // @synthesize videoGroupView=_videoGroupView; @property(copy, nonatomic) NSString *contactName; // @synthesize contactName=_contactName; @property(retain, nonatomic) CNKEffectsLayoutController *effectsLayoutController; // @synthesize effectsLayoutController=_effectsLayoutController; @property(retain, nonatomic) CFXEffectBrowserViewController *effectsBrowserViewController; // @synthesize effectsBrowserViewController=_effectsBrowserViewController; @property(retain, nonatomic) CNKFaceTimeCameraViewController *effectsViewController; // @synthesize effectsViewController=_effectsViewController; @property(nonatomic) _Bool willTransitionToStagingViewController; // @synthesize willTransitionToStagingViewController=_willTransitionToStagingViewController; @property(nonatomic) _Bool 
willTransitionToMultiwayViewController; // @synthesize willTransitionToMultiwayViewController=_willTransitionToMultiwayViewController; @property(retain) PHStatusBarGradientView *fullScreenVideoStatusBarGradientView; // @synthesize fullScreenVideoStatusBarGradientView=_fullScreenVideoStatusBarGradientView; @property(retain) NSTimer *backupRotationTimer; // @synthesize backupRotationTimer=_backupRotationTimer; @property(copy) CDUnknownBlockType deviceOrientationBlock; // @synthesize deviceOrientationBlock=_deviceOrientationBlock; @property long long lastValidOrientationForRemoteCamera; // @synthesize lastValidOrientationForRemoteCamera=_lastValidOrientationForRemoteCamera; @property long long lockedLocalOrientationForPIP; // @synthesize lockedLocalOrientationForPIP=_lockedLocalOrientationForPIP; @property(retain, nonatomic) PHAudioRoutingAlertController *routeListController; // @synthesize routeListController=_routeListController; @property(readonly, nonatomic) _Bool shouldShowSensitiveUI; // @synthesize shouldShowSensitiveUI=_shouldShowSensitiveUI; @property(retain, nonatomic) ICSPIPButton *flipButton; // @synthesize flipButton=_flipButton; @property(retain, nonatomic) ICSInsulatingView *momentsIndicatorInsulatingView; // @synthesize momentsIndicatorInsulatingView=_momentsIndicatorInsulatingView; @property(retain, nonatomic) PHVideoOverlayViewController *overlayViewController; // @synthesize overlayViewController=_overlayViewController; @property(retain, nonatomic) PHCallParticipantsView *callParticipantsView; // @synthesize callParticipantsView=_callParticipantsView; @property(readonly, nonatomic) _Bool isShowingExpandedControls; // @synthesize isShowingExpandedControls=_isShowingExpandedControls; @property(nonatomic) _Bool effectsEnabled; // @synthesize effectsEnabled=_effectsEnabled; @property(nonatomic) _Bool isShowingEffectsBrowser; // @synthesize isShowingEffectsBrowser=_isShowingEffectsBrowser; @property(nonatomic) _Bool isShowingExpandedLocalVideo; // @synthesize isShowingExpandedLocalVideo=_isShowingExpandedLocalVideo; @property(nonatomic) _Bool isShowingLockScreenUI; // @synthesize isShowingLockScreenUI=_isShowingLockScreenUI; @property(nonatomic) _Bool isShowingCallWaitingUI; // @synthesize isShowingCallWaitingUI=_isShowingCallWaitingUI; @property(nonatomic) _Bool isShowFullConferenceDisplayAnimationComplete; // @synthesize isShowFullConferenceDisplayAnimationComplete=_isShowFullConferenceDisplayAnimationComplete; @property(nonatomic) _Bool isShowingFullConferenceDisplay; // @synthesize isShowingFullConferenceDisplay=_isShowingFullConferenceDisplay; @property(nonatomic) _Bool isShowingControlsBar; // @synthesize isShowingControlsBar=_isShowingControlsBar; @property(nonatomic) _Bool isShowingBottomBar; // @synthesize isShowingBottomBar=_isShowingBottomBar; @property(nonatomic) _Bool isShowingTopBar; // @synthesize isShowingTopBar=_isShowingTopBar; @property(nonatomic) _Bool useLargeTextAndIcons; // @synthesize useLargeTextAndIcons=_useLargeTextAndIcons; @property(nonatomic) unsigned long long rotationType; // @synthesize rotationType=_rotationType; - (void).cxx_destruct; // IMP=0x00000001000c190c - (id)incomingVideoOrMultiwayCall; // IMP=0x00000001000c0f68 - (id)currentVideoOrMultiwayCall; // IMP=0x00000001000c0d90 @property(readonly, nonatomic) UIView *localParticipantView; @property(readonly, copy, nonatomic) NSArray *disappearingViews; - (id)childViewControllerForHomeIndicatorAutoHidden; // IMP=0x00000001000c0bbc - (_Bool)prefersHomeIndicatorAutoHidden; // 
IMP=0x00000001000c0b9c - (void)routesChangedForRouteController:(id)arg1; // IMP=0x00000001000c0ae0 - (void)willCaptureRemoteRequestWithMomentsController:(id)arg1; // IMP=0x00000001000c0a4c - (void)momentsController:(id)arg1 didUpdateCapabilities:(id)arg2 forCall:(id)arg3; // IMP=0x00000001000c0968 - (void)videoGroupViewDidStartDrag:(id)arg1; // IMP=0x00000001000c0890 - (void)videoGroupView:(id)arg1 didUpdateCorner:(long long)arg2; // IMP=0x00000001000c0550 - (void)videoOverlayViewController:(id)arg1 isShowingOverlay:(_Bool)arg2; // IMP=0x00000001000c0544 - (void)endCaptureForRequestWithTransactionID:(id)arg1; // IMP=0x00000001000c0298 - (id)nameForPhotoIndicator; // IMP=0x00000001000c01b8 - (void)remoteDidTakePhoto; // IMP=0x00000001000c001c - (void)didTakePhoto; // IMP=0x00000001000bfee8 - (void)capturePhotoForCall:(id)arg1; // IMP=0x00000001000bfa14 - (void)didPressCapturePhotoButton; // IMP=0x00000001000bf8cc - (void)updateFaceTimePhotosButton; // IMP=0x00000001000bf6dc - (void)setupFlashViewIfNecessary; // IMP=0x00000001000bf588 - (void)setupMomentsViewsIfNecessary; // IMP=0x00000001000bf3f0 - (void)updateShutterButtonForControlsState:(long long)arg1; // IMP=0x00000001000bf1f0 - (void)setControlsState:(long long)arg1 animated:(_Bool)arg2 completion:(CDUnknownBlockType)arg3; // IMP=0x00000001000bef58 - (void)openMessagesToAddress:(id)arg1; // IMP=0x00000001000bedcc - (void)inCallControlsViewController:(id)arg1 didTap:(long long)arg2; // IMP=0x00000001000be8fc - (void)inCallControlsDragController:(id)arg1 willStartDragFrom:(long long)arg2; // IMP=0x00000001000be870 - (void)inCallControlsDragController:(id)arg1 didStartDragTo:(long long)arg2; // IMP=0x00000001000be7d0 - (void)inCallControlsDragController:(id)arg1 didEndDragAt:(long long)arg2; // IMP=0x00000001000be710 - (long long)inCallControlsDragControllerShortestState:(id)arg1; // IMP=0x00000001000be6b8 - (void)effectBrowserViewController:(id)arg1 willChangeDockHeight:(double)arg2; // IMP=0x00000001000be510 - (struct CGSize)expandedAppViewControllerSizeForEffectBrowserViewController:(id)arg1; // IMP=0x00000001000be488 - (void)effectBrowserViewController:(id)arg1 dismissExpandedAppViewController:(id)arg2 animated:(_Bool)arg3 completion:(CDUnknownBlockType)arg4; // IMP=0x00000001000be3d4 - (void)effectBrowserViewController:(id)arg1 presentExpandedAppViewController:(id)arg2 animated:(_Bool)arg3 completion:(CDUnknownBlockType)arg4; // IMP=0x00000001000be320 - (_Bool)shouldAlwaysPresentExpandedAppsForEffectBrowserViewController:(id)arg1; // IMP=0x00000001000be2a8 - (void)didSelectApp:(id)arg1; // IMP=0x00000001000be25c - (int)avcEffectsTypeFromEffectsState:(long long)arg1; // IMP=0x00000001000be244 - (void)cameraViewController:(id)arg1 didChangeEffectsState:(long long)arg2; // IMP=0x00000001000be1e4 - (void)cameraViewController:(id)arg1 didRenderFrame:(id)arg2; // IMP=0x00000001000be0f8 - (void)cameraViewControllerPresentationRectWasDoubleTapped:(id)arg1; // IMP=0x00000001000be0c0 - (void)clearEffectsState; // IMP=0x00000001000be028 - (void)serverDidTimeout:(id)arg1; // IMP=0x00000001000bdee4 - (struct CGRect)contentsRectFromPixelBuffer:(struct __CVBuffer *)arg1 presentationRect:(struct CGRect)arg2 shouldSwapOrientation:(_Bool)arg3; // IMP=0x00000001000bde14 - (void)didCaptureVideoFrame:(id)arg1; // IMP=0x00000001000bd98c - (void)removeAllEffectsObjects; // IMP=0x00000001000bd7b4 - (void)_setupEffectsViewControllersIfNecessary; // IMP=0x00000001000bd1c8 - (void)_setupAVCEffectsIfNecessary; // IMP=0x00000001000bd138 - (long 
long)preferredWhitePointAdaptivityStyle; // IMP=0x00000001000bd130 - (id)overrideStringForParticipantsView:(id)arg1; // IMP=0x00000001000bd128 - (_Bool)shouldShowSingleDurationLabelInParticipantsView:(id)arg1; // IMP=0x00000001000bd120 - (_Bool)shouldShowInfoButtonForParticipantAtIndex:(unsigned long long)arg1 inParticipantsView:(id)arg2; // IMP=0x00000001000bd118 - (unsigned long long)numberOfParticipantsForParticipantsView:(id)arg1; // IMP=0x00000001000bd110 - (unsigned short)activityStateForParticipantAtIndex:(unsigned long long)arg1 inParticipantsView:(id)arg2; // IMP=0x00000001000bd108 - (id)supplementalParticipantLabelFormatStringAtIndex:(unsigned long long)arg1 inParticipantsView:(id)arg2; // IMP=0x00000001000bd100 - (id)labelForParticipantAtIndex:(unsigned long long)arg1 inParticipantsView:(id)arg2; // IMP=0x00000001000bcfdc - (id)nameForParticipantAtIndex:(unsigned long long)arg1 inParticipantsView:(id)arg2; // IMP=0x00000001000bcfd0 - (id)contactForParticipantAtIndex:(unsigned long long)arg1 inParticipantsView:(id)arg2; // IMP=0x00000001000bcfc4 - (struct CGRect)preferredPIPContentRect; // IMP=0x00000001000bcdb8 - (struct CGSize)preferredContentAspectRatio; // IMP=0x00000001000bcdac - (void)hardwareButtonEventNotification:(id)arg1; // IMP=0x00000001000bc978 - (unsigned long long)desiredButtonEvents; // IMP=0x00000001000bc88c - (_Bool)allowsOrientationChangeEvents; // IMP=0x00000001000bc884 - (_Bool)allowsBanners; // IMP=0x00000001000bc848 - (_Bool)allowsMenuButtonDismissal; // IMP=0x00000001000bc80c - (void)_updateRemoteVideoPresentationState; // IMP=0x00000001000bc608 - (void)_informOfRemoteVideoPresentationSizeChange; // IMP=0x00000001000bc3f0 - (void)_handlePIPDidFinishResizingNotification:(id)arg1; // IMP=0x00000001000bc374 - (void)_handlePIPControllerStateDidChangeNotification:(id)arg1; // IMP=0x00000001000bbb88 - (void)_handleVideoCallRemoteVideoStateDidChange:(id)arg1; // IMP=0x00000001000bba74 - (void)_handleVideoCallRemoteScreenDidChange:(id)arg1; // IMP=0x00000001000bb910 - (void)_handleVideoCallRemoteVideoDidChange:(id)arg1; // IMP=0x00000001000bb400 - (void)_handleVideoCallRemoteVideoCameraTypeChanged:(id)arg1; // IMP=0x00000001000bb294 - (void)_handleVideoCallInvitationSent:(id)arg1; // IMP=0x00000001000bb21c - (void)registerCurrentCallForMomentsIfNecessary; // IMP=0x00000001000badec - (void)_handleCallConnected:(id)arg1; // IMP=0x00000001000ba854 - (void)_handleVideoPreviewFirstFrameArrived:(id)arg1; // IMP=0x00000001000ba358 - (void)_handleVideoPreviewDidStopNotification:(id)arg1; // IMP=0x00000001000ba208 - (void)_handleVideoPreviewDidStartNotification:(id)arg1; // IMP=0x00000001000ba1d0 - (void)_handleLocalCameraAvailableNotification:(id)arg1; // IMP=0x00000001000ba12c - (void)_handleLocalCameraErrorNotification:(id)arg1; // IMP=0x00000001000ba044 - (void)setLocalCameraPosition:(long long)arg1 withCompletion:(CDUnknownBlockType)arg2; // IMP=0x00000001000b9e78 - (void)toggleLocalCameraType; // IMP=0x00000001000b9a80 - (void)updateRemoteOverlays; // IMP=0x00000001000b9728 - (void)showLocalViewAsMuted:(_Bool)arg1; // IMP=0x00000001000b92c4 - (id)_viewWithImage:(id)arg1 title:(id)arg2 bounds:(struct CGRect)arg3 center:(struct CGPoint)arg4; // IMP=0x00000001000b8e60 - (void)_ensureFullScreenVideoBackdropViewExists; // IMP=0x00000001000b8d14 - (void)_ensurePIPVideoOverlayViewExists; // IMP=0x00000001000b8b48 - (void)setMuteButtonSelected:(_Bool)arg1; // IMP=0x00000001000b8afc - (void)resumeFromCallWaitingAnimated:(_Bool)arg1; // IMP=0x00000001000b89b0 - 
(void)prepareForCallWaitingAnimated:(_Bool)arg1; // IMP=0x00000001000b8774 - (void)returnToFullScreenPreviewAnimated:(_Bool)arg1 completion:(CDUnknownBlockType)arg2; // IMP=0x00000001000b810c - (void)synchronizedSecondTickNotification:(id)arg1; // IMP=0x00000001000b8100 - (void)wantsHoldMusicChangedNotification:(id)arg1; // IMP=0x00000001000b7ee8 - (void)pausePreview; // IMP=0x00000001000b7dc0 - (void)stopPreview; // IMP=0x00000001000b7c98 - (void)stopOrPausePreview; // IMP=0x00000001000b7c28 - (void)startPreview; // IMP=0x00000001000b7744 - (void)startPreviewIfNeeded; // IMP=0x00000001000b763c - (void)updateViewControllerForOrientation:(long long)arg1; // IMP=0x00000001000b75d8 - (void)updateAudioButton:(_Bool)arg1; // IMP=0x00000001000b74f8 - (void)uplinkMuteStatusChangedNotification:(id)arg1; // IMP=0x00000001000b7450 - (void)_resetDisplayControllerForNewFaceTimeCall; // IMP=0x00000001000b7360 - (void)handleThreeFingerPress:(id)arg1; // IMP=0x00000001000b72b0 - (void)resetStatsHUDTimer; // IMP=0x00000001000b7274 - (void)setupStatsHUDTimer; // IMP=0x00000001000b71fc - (void)_updateStatsHUD; // IMP=0x00000001000b6da8 - (void)_createStatsHUD; // IMP=0x00000001000b6c48 - (void)_flipRemoteCameraViewToCameraType:(long long)arg1 animated:(_Bool)arg2 withCompletion:(CDUnknownBlockType)arg3; // IMP=0x00000001000b6930 - (void)_flipLocalCameraViewToCameraPosition:(long long)arg1 animated:(_Bool)arg2 withCompletion:(CDUnknownBlockType)arg3; // IMP=0x00000001000b65d8 - (void)updateTopBarLabel; // IMP=0x00000001000b5fa4 - (id)contact; // IMP=0x00000001000b5eb0 - (void)clearCachedContactInfo; // IMP=0x00000001000b5e74 - (void)updateContactInformation; // IMP=0x00000001000b5b38 - (void)_setupVideoLayers; // IMP=0x00000001000b5b00 - (id)createFullScreenVideoViewWithFrame:(struct CGRect)arg1 hidden:(_Bool)arg2 name:(id)arg3; // IMP=0x00000001000b5998 - (void)_ensureVideoLayersExist; // IMP=0x00000001000b4a54 - (void)changeToFullConferenceDisplayAnimated:(_Bool)arg1; // IMP=0x00000001000b3afc - (void)updatePIPToExpanded:(_Bool)arg1; // IMP=0x00000001000b34b8 - (void)showControlsWithCompletion:(CDUnknownBlockType)arg1; // IMP=0x00000001000b3330 - (_Bool)shouldBeInFullConferenceDisplay; // IMP=0x00000001000b3298 - (void)showInCallStateEndedForCall:(id)arg1; // IMP=0x00000001000b2aa4 - (id)_defaultStringForCall:(id)arg1; // IMP=0x00000001000b290c - (void)_delayedEndFaceTimeDisplayAnimation; // IMP=0x00000001000b28b0 - (void)_endFaceTimeDisplayAnimation; // IMP=0x00000001000b27f4 - (void)_deselectEndVideoButton; // IMP=0x00000001000b27ac - (void)_selectEndVideoButton; // IMP=0x00000001000b2764 - (void)_removeOverlayViews; // IMP=0x00000001000b269c - (void)autoFadeOutBottomBar; // IMP=0x00000001000b25c8 - (void)toggleInCallControlsViewState:(_Bool)arg1; // IMP=0x00000001000b23c4 - (void)updateTopBar:(_Bool)arg1 bottomBar:(_Bool)arg2 controlsBar:(_Bool)arg3 animated:(_Bool)arg4; // IMP=0x00000001000b1b64 @property(readonly, nonatomic) _Bool isShowingControlsViewController; - (long long)multiwayOrientationFor:(long long)arg1; // IMP=0x00000001000b1a84 - (void)updateControlsConstraintsControllerWithOrientation:(long long)arg1; // IMP=0x00000001000b1990 - (void)_setupCallBars; // IMP=0x00000001000b0970 - (void)_dissociateVideoLayers; // IMP=0x00000001000b0698 - (void)_dissociateRemoteVideoLayersForCall:(id)arg1; // IMP=0x00000001000b0570 - (void)associateRemoteVideoLayers; // IMP=0x00000001000b0234 - (void)associateLocalVideoLayers; // IMP=0x00000001000b00d8 - (_Bool)shouldDisableEdgeClip; // 
IMP=0x00000001000b00c8 - (void)resetLocalPreviewState; // IMP=0x00000001000affa0 - (_Bool)supportsBackFacingCamera; // IMP=0x00000001000aff34 - (void)messageComposeViewController:(id)arg1 shouldSendMessage:(id)arg2 toRecipients:(id)arg3 completion:(CDUnknownBlockType)arg4; // IMP=0x00000001000afea8 - (void)messageComposeViewController:(id)arg1 didFinishWithResult:(long long)arg2; // IMP=0x00000001000afe28 - (void)_leaveMessage; // IMP=0x00000001000afae4 - (void)bottomBarActionPerformed:(long long)arg1 withCompletionState:(long long)arg2 fromBar:(id)arg3; // IMP=0x00000001000af2d8 - (void)_endCall; // IMP=0x00000001000aef94 - (void)_toggleLocalVideo; // IMP=0x00000001000aed40 - (void)_toggleMute; // IMP=0x00000001000aece8 - (void)sliderActionFromBar:(id)arg1 slidToProportion:(float)arg2; // IMP=0x00000001000aece4 - (_Bool)shouldShowActionTypeAudioRoute; // IMP=0x00000001000aec68 - (_Bool)shouldShowActionTypePunchOut; // IMP=0x00000001000aebcc - (_Bool)shouldShowActionTypeCameraFlip; // IMP=0x00000001000aebc0 - (_Bool)shouldShowActionTypeEffects; // IMP=0x00000001000aebb8 - (_Bool)shouldShowActionTypePhotoCapture; // IMP=0x00000001000aeb10 - (void)setCurrentState:(unsigned short)arg1 animated:(_Bool)arg2; // IMP=0x00000001000adda8 - (void)setCurrentState:(unsigned short)arg1; // IMP=0x00000001000add98 - (_Bool)_call:(id)arg1 isCrossInvitationWithCall:(id)arg2; // IMP=0x00000001000adcc8 - (void)updateCurrentState; // IMP=0x00000001000ad41c - (void)_handleCallStartedConnecting:(id)arg1; // IMP=0x00000001000ad338 - (void)videoCallStateChangedNotification:(id)arg1; // IMP=0x00000001000acd30 - (void)releaseDismissalAssertion; // IMP=0x00000001000acce0 - (_Bool)needsDismissalAssertion; // IMP=0x00000001000acc60 - (void)didReceiveMemoryWarning; // IMP=0x00000001000acb14 - (void)deregisterForPresentedNotifications; // IMP=0x00000001000ac914 - (void)registerForPresentedNotificationsIfNecessary; // IMP=0x00000001000ac6b8 - (void)viewDidDisappear:(_Bool)arg1; // IMP=0x00000001000ac3e8 - (void)viewWillDisappear:(_Bool)arg1; // IMP=0x00000001000ac250 - (void)viewDidMoveToWindow:(id)arg1 shouldAppearOrDisappear:(_Bool)arg2; // IMP=0x00000001000ac1b0 - (void)viewDidAppear:(_Bool)arg1; // IMP=0x00000001000abff4 - (void)viewWillAppear:(_Bool)arg1; // IMP=0x00000001000abafc - (void)viewDidLayoutSubviews; // IMP=0x00000001000ab938 - (void)_handlePIPDidCancelNotification:(id)arg1; // IMP=0x00000001000ab788 - (void)_sublayerResizingViewDidResizeNotification:(id)arg1; // IMP=0x00000001000ab6b4 - (void)loadView; // IMP=0x00000001000ab3bc - (void)dealloc; // IMP=0x00000001000ab130 - (id)initWithNibName:(id)arg1 bundle:(id)arg2; // IMP=0x00000001000aa94c - (void)toggleLocalCameraType:(CDUnknownBlockType)arg1; // IMP=0x0000000100058544 - (void)_testing_didTransitionToIncomingRingingCallState:(id)arg1; // IMP=0x00000001000583a4 - (void)_testing_didTransitionToIdleCallState; // IMP=0x000000010005825c - (void)_testing_didTransitionToEndedCallState; // IMP=0x00000001000581f8 - (void)_testing_didTransitionToOutgoingRingingCallState:(id)arg1; // IMP=0x0000000100057dbc - (void)_testing_didTransitionToActiveCallState:(id)arg1; // IMP=0x0000000100057d58 - (void)_testing_finishedAnimatingToActiveCallState:(id)arg1; // IMP=0x0000000100057cc4 - (void)_resetPIPToFullScreen; // IMP=0x0000000100071b78 - (void)_adjustPIPBoundsForCurrentOrientationAnimated:(_Bool)arg1; // IMP=0x0000000100071608 - (struct CGRect)_cornerLocationsRectForOrientation:(long long)arg1; // IMP=0x0000000100070fd0 - (struct 
CGRect)_pipBorderBoundsForPIPSize:(struct CGSize)arg1; // IMP=0x0000000100070fb8 - (struct CGSize)_pipSizeForAspectRatio:(struct CGSize)arg1; // IMP=0x0000000100070e64 - (double)_pipWidth; // IMP=0x0000000100070e50 - (void)_ensureLocalVideoWillBecomeVisible; // IMP=0x0000000100070dc4 - (void)_autoFadeInLocalVideo; // IMP=0x0000000100070d30 - (void)_fadeInLocalVideo; // IMP=0x0000000100070b68 - (double)_pipStatusBarPadding; // IMP=0x0000000100070b34 - (unsigned long long)_flipAnimationOptionsForOrientation:(long long)arg1; // IMP=0x0000000100082320 - (void)_updateLocalVideoOrientationAnimated:(_Bool)arg1; // IMP=0x0000000100082014 - (void)_adjustFullScreenView:(id)arg1 forOrientation:(long long)arg2; // IMP=0x0000000100081c84 - (void)_adjustForOrientationAnimated:(_Bool)arg1; // IMP=0x0000000100081c74 - (void)_adjustRemoteVideoBounds; // IMP=0x000000010008193c - (void)_adjustForOrientationAnimated:(_Bool)arg1 completion:(CDUnknownBlockType)arg2; // IMP=0x0000000100080750 - (void)_reorderPIPAboveBottomBar; // IMP=0x00000001000806cc - (long long)_preferredLocalOrientationForDeviceOrientation; // IMP=0x0000000100080414 - (struct CGRect)_boundsForRemoteVideoInLocalOrientation:(long long)arg1; // IMP=0x000000010007fea4 - (struct CGSize)_remoteVideoAspectRatio; // IMP=0x000000010007fe08 - (struct CGPoint)_centerForRemoteVideoViewInLocalOrientation:(long long)arg1; // IMP=0x000000010007fd8c - (struct CGRect)_boundsForRemoteVideoViewInLocalOrientation:(long long)arg1; // IMP=0x000000010007fcf4 - (struct CGRect)_boundsForRemoteVideoOverlayViewInRemoteCameraOrientation:(long long)arg1; // IMP=0x000000010007fc60 - (struct CGRect)_boundsForLocalMutedViewInLocalOrientation:(long long)arg1; // IMP=0x000000010007fbc8 - (_Bool)_fillRemoteVideoOnScreenWithLocalOrientation:(long long)arg1 remoteAspectRatio:(struct CGSize)arg2; // IMP=0x000000010007fba8 - (struct CGSize)_normalizedScreenSizeForOrientation:(long long)arg1; // IMP=0x000000010007f9ac - (void)_adjustCallBarsForOrientation:(long long)arg1; // IMP=0x000000010007f450 - (void)_adjustFlipButtonForLocalRemoteVideoSwap; // IMP=0x000000010007f3fc - (void)_adjustFlipButtonForOrientation:(long long)arg1; // IMP=0x000000010007f200 - (long long)_lastKnownValidDeviceOrientation; // IMP=0x000000010007f180 - (_Bool)_isValidFaceTimeOrientation:(long long)arg1; // IMP=0x000000010007f160 - (double)_angleForLocalOrientation:(long long)arg1 remoteOrientation:(long long)arg2; // IMP=0x000000010007ee90 - (long long)_deviceOrientationForCameraOrientation:(long long)arg1; // IMP=0x000000010007ee80 - (long long)_cameraOrientationForDeviceOrientation:(long long)arg1; // IMP=0x000000010007ee70 - (void)_applyAutoRotationCorrectionForOrientation:(long long)arg1; // IMP=0x000000010007ed74 - (void)_resetViewLayout:(id)arg1; // IMP=0x000000010007ecbc - (void)_correctAutoRotationTransformForView:(id)arg1; // IMP=0x000000010007ec10 - (void)_applyAutoRotationCorrectionForSize:(struct CGSize)arg1 withTransform:(struct CGAffineTransform)arg2; // IMP=0x000000010007e974 - (struct CGAffineTransform)_transformForInterfaceOrientation:(long long)arg1; // IMP=0x000000010007e8d4 - (unsigned long long)supportedInterfaceOrientations; // IMP=0x000000010007e8a0 - (void)viewWillTransitionToSize:(struct CGSize)arg1 withTransitionCoordinator:(id)arg2; // IMP=0x000000010007e574 - (void)didChangeToDeviceOrientation:(long long)arg1; // IMP=0x000000010007e3e4 - (void)invokeDeviceOrientationBlock; // IMP=0x000000010007e260 - (void)_updateDeviceOrientation:(long long)arg1; // IMP=0x000000010007e064 - 
(void)didChangeDeviceOrientationNotification:(id)arg1; // IMP=0x000000010007df14 - (void)_removeDeviceOrientationChangeObserver; // IMP=0x000000010007de48 - (void)_addDeviceOrientationChangeObserver; // IMP=0x000000010007dd1c - (void)popoverPresentationControllerDidDismissPopover:(id)arg1; // IMP=0x0000000100082fd4 - (void)audioRoutingAlertControllerDidDismiss:(id)arg1; // IMP=0x0000000100082fc4 - (void)audioRoutingAlertController:(id)arg1 didToggleMute:(_Bool)arg2; // IMP=0x0000000100082f88 - (void)audioRoutingAlertController:(id)arg1 didSelectRoute:(id)arg2; // IMP=0x0000000100082ee0 - (id)muteActionTitleForAudioRoutingAlertController:(id)arg1; // IMP=0x0000000100082e6c - (id)hideActionTitleForAudioRoutingAlertController:(id)arg1; // IMP=0x0000000100082df8 - (id)routesForAudioRoutingAlertController:(id)arg1; // IMP=0x0000000100082d74 - (void)muteConference:(_Bool)arg1; // IMP=0x0000000100082cec - (void)updateUIForMute:(_Bool)arg1; // IMP=0x0000000100082c3c - (void)dismissAudioRoutingDeviceListAnimated:(_Bool)arg1; // IMP=0x00000001000829a0 - (void)dismissAudioRoutingDeviceList; // IMP=0x0000000100082990 - (void)reloadAudioRoutingDeviceList; // IMP=0x0000000100082950 - (void)revealAudioRoutingDeviceListAnimated:(_Bool)arg1; // IMP=0x00000001000823b4 - (void)cancelAutodismissTimer; // IMP=0x000000010008238c - (void)startAutodismissRoutingTimer; // IMP=0x0000000100082348 - (_Bool)gestureRecognizer:(id)arg1 shouldRequireFailureOfGestureRecognizer:(id)arg2; // IMP=0x0000000100083f74 - (_Bool)gestureRecognizer:(id)arg1 shouldReceiveTouch:(id)arg2; // IMP=0x0000000100083e30 - (void)handlePIPDoubleTap:(id)arg1; // IMP=0x0000000100083d38 - (void)handleSingleTapToShowHideUI:(id)arg1; // IMP=0x0000000100083be0 - (void)_addPIPDoubleTapGestureRecognizer; // IMP=0x0000000100083b4c - (void)_addFullScreenPIPContainerViewTapGestureRecognizer; // IMP=0x0000000100083ab8 - (void)_addStatusBarSingleTapGestureRecognizer; // IMP=0x0000000100083a24 - (void)_refreshStatusBarAndPIPAnimated:(_Bool)arg1; // IMP=0x00000001000836bc - (_Bool)canHideBottomBar; // IMP=0x00000001000835a4 - (_Bool)shouldHideStatusBar; // IMP=0x0000000100083584 - (_Bool)shouldShowStatusBar; // IMP=0x0000000100083488 - (_Bool)_currentUIRequiresStatusBar; // IMP=0x0000000100083338 - (_Bool)_currentOrientationSupportsStatusBar; // IMP=0x00000001000832e8 // Remaining properties @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly) unsigned long long hash; @property(readonly) Class superclass; @end
//package practiceShitA; import java.util.Scanner; public class BuyAShowel { public static void main(String[] args) { Scanner in=new Scanner(System.in); String s=in.nextLine(); String a[]=s.split(" "); //System.out.println(a[0]+" "+a[1]); int l=Character.getNumericValue(a[0].charAt(a[0].length()-1)); int r=Integer.parseInt(a[1]); //System.out.println(l+" "+r); if(l==r || l==0) { System.out.println(1); } else { int count=1,k=0,i=2,j=l; while(k!=r) { l=l*i; i++; count++; k=l%10; if(k==0) { break; } l=j; } System.out.println(count); } in.close(); } }
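The loop above searches for the smallest number of shovels whose total price ends in 0 or in the value of the single special coin, so the bill can be paid with 10-unit coins plus at most one special coin. A compact Python sketch of the same last-digit idea, assuming the same input format of two integers on one line:

# price: cost of one shovel; r: value of the single special coin (1-9).
price, r = map(int, input().split())

n = 1
# The last digit of n * price cycles, so this loop terminates within at most 10 steps.
while (n * price) % 10 not in (0, r):
    n += 1
print(n)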
<reponame>coblox/strum #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum CaseStyle { CamelCase, KebabCase, MixedCase, ShoutySnakeCase, SnakeCase, TitleCase, } impl<'s> From<&'s str> for CaseStyle { fn from(text: &'s str) -> CaseStyle { match text { "camel_case" => CaseStyle::CamelCase, "kebab_case" => CaseStyle::KebabCase, "mixed_case" => CaseStyle::MixedCase, "shouty_snake_case" | "shouty_snek_case" => CaseStyle::ShoutySnakeCase, "snake_case" | "snek_case" => CaseStyle::SnakeCase, "title_case" => CaseStyle::TitleCase, _ => panic!( "Unexpected case style for serialize_all: `{}`. Valid values are: `{:?}`", text, [ "camel_case", "kebab_case", "mixed_case", "shouty_snake_case", "snake_case", "title_case" ] ), } } }
// Creates a map from the query parameters of |url|. If |url| contains multiple // values for the same parameter, the last value is used. base::flat_map<std::string, std::string> MakeQueryMap(const GURL& url) { base::flat_map<std::string, std::string> result; for (net::QueryIterator query_it(url); !query_it.IsAtEnd(); query_it.Advance()) { result[std::string(query_it.GetKey())] = query_it.GetUnescapedValue(); } return result; }
def add(self, step): if type(step) is not tuple: transformer = step if getattr(transformer, "name", None): name = transformer.name else: idx = len(self.steps) name = f"step{idx:02d}" step = (name, transformer) self.steps.append(step) return self
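A quick usage sketch, assuming add() is defined on a pipeline-like class that initializes self.steps = []; the Pipeline and Step classes below are hypothetical stand-ins, not part of the original code, and add() is called here as the plain function shown above.

class Step:
    """Hypothetical transformer; only the optional `name` attribute matters to add()."""
    def __init__(self, name=None):
        self.name = name


class Pipeline:
    """Hypothetical owner of the steps list that add() appends to."""
    def __init__(self):
        self.steps = []


pipe = Pipeline()
add(pipe, Step("scale"))       # named transformer  -> ("scale", <Step>)
add(pipe, Step())              # unnamed            -> auto-generated name "step01"
add(pipe, ("custom", Step()))  # pre-built (name, transformer) tuple is kept as-is
print([name for name, _ in pipe.steps])  # ['scale', 'step01', 'custom']

Because add() returns self, the calls could also be chained once the function is bound as a method.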
/** * Created by Programmer on 6/6/2017. */ public class SellsDatabaseModel { private int id; private String sellsCode; private String customerId; private String totalAmount; private String discount; private String payAmount; private String paymentType; private String sellDate; private String paymentStatus; private String sellBy; public SellsDatabaseModel() { } public SellsDatabaseModel(String sellsCode, String customerId, String totalAmount, String discount, String payAmount, String paymentType, String sellDate, String paymentStatus, String sellBy) { this.sellsCode = sellsCode; this.customerId = customerId; this.totalAmount = totalAmount; this.discount = discount; this.payAmount = payAmount; this.paymentType = paymentType; this.sellDate = sellDate; this.paymentStatus = paymentStatus; this.sellBy = sellBy; } public SellsDatabaseModel(int id, String sellsCode, String customerId, String totalAmount, String discount, String payAmount, String paymentType, String sellDate, String paymentStatus, String sellBy) { this.id = id; this.sellsCode = sellsCode; this.customerId = customerId; this.totalAmount = totalAmount; this.discount = discount; this.payAmount = payAmount; this.paymentType = paymentType; this.sellDate = sellDate; this.paymentStatus = paymentStatus; this.sellBy = sellBy; } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getSellsCode() { return sellsCode; } public void setSellsCode(String sellsCode) { this.sellsCode = sellsCode; } public String getCustomerId() { return customerId; } public void setCustomerId(String customerId) { this.customerId = customerId; } public String getTotalAmount() { return totalAmount; } public void setTotalAmount(String totalAmount) { this.totalAmount = totalAmount; } public String getDiscount() { return discount; } public void setDiscount(String discount) { this.discount = discount; } public String getPayAmount() { return payAmount; } public void setPayAmount(String payAmount) { this.payAmount = payAmount; } public String getPaymentType() { return paymentType; } public void setPaymentType(String paymentType) { this.paymentType = paymentType; } public String getSellDate() { return sellDate; } public void setSellDate(String sellDate) { this.sellDate = sellDate; } public String getPaymentStatus() { return paymentStatus; } public void setPaymentStatus(String paymentStatus) { this.paymentStatus = paymentStatus; } public String getSellBy() { return sellBy; } public void setSellBy(String sellBy) { this.sellBy = sellBy; } }
Joseph Cummins is the author of Anything for a Vote: Dirty Tricks, Cheap Shots and October Surprises in U.S. Presidential Campaigns. If there’s one thing America’s presidential candidates have always loved, it’s attacking their opponents with slurs that cast aspersions on their manhood. These past few weeks have brought the tradition to a new level of unmanning. The ever-more-fascinating Republican primary season has seen Marco Rubio belittling the size of Donald Trump’s penis, with Trump avowing in front of millions during last week’s debate: “I guarantee you there’s no problem.” Earlier that day, after Mitt Romney had denounced him as a “phony” and a “fraud,” Trump told a crowd in Portland, Maine: “I backed Mitt Romney [in 2012]. He was begging for my endorsement. I could have said ‘Mitt, drop to your knees.’ He would have dropped to his knees.” Well, at least Trump didn’t pull out his penis—“the demon rod,” as St. Augustine called it, in a nice turn of phrase—and show it to us, which is what Lyndon Johnson did when journalists asked him why America was in Vietnam. “That’s why,” Johnson reportedly said. Who knows what tonight's GOP debate will bring? People sometimes forget how deeply personal, how ferociously angry, how bizarrely sexual American political contests can get when top dogs start barking. During the election of 1800, James Callender, a hack writer hired by Thomas Jefferson and his Republicans, attacked Federalist President John Adams as “a hideous hermaphroditical character which has neither the force and firmness of a man, nor the gentleness and sensibility of a woman.” (Callender’s reward for that nicely chosen “which” was eight months in jail under the Alien and Sedition Acts.) In 1835, former Congressman Davy Crockett went after the putative Democratic presidential candidate Martin Van Buren as being “laced up in corsets, such as women in a town wear, and if possible tighter than the best of them. It would be difficult to say from his personal appearance, whether he was a man or a woman, but for his large red and gray whiskers.” And when Van Buren got elected despite this, and ran again as the incumbent in 1840, a Pennsylvania Congressman named Charles Ogle made a three-day speech in the House of Representatives during the course of which he claimed that Van Buren—whom he considered a luxury-loving fop—had constructed on the White House grounds a pair of “clever sized hills” that resembled “an Amazon’s bosom, with a miniature knoll on its apex, to denote the nipple.” (Thousands of copies of the speech were printed and distributed; it helped the Whig William Henry Harrison win the election.) Gay slurs went over big in the mid-19th century, as they do today. The 1856 Democratic presidential candidate James Buchanan was long rumored by his opponents to play for the other team, possibly because of his close friendship with his longtime roommate Sen William Rufus King of Alabama. Andrew Jackson (a manly man if there ever was one) called Buchanan “Aunt Nancy,” and the nickname stuck. House Speaker Henry Clay liked to taunt Buchanan to his face, lisping: “I wish I had a more ladylike manner of expressing myself.” Of course, one of the great American presidential manliness taunters in American history was Teddy Roosevelt. Henry James called him “a monstrous embodiment of unprecedented and resounding noise,” and he is the presidential candidate whose bombast most resembles that of Donald Trump.
During the election of 1904, he called the president of Venezuela “a villainous little monkey” and tagged his Democratic opponent, Judge Alton Parker, “that neutral-tinted individual”—the “low-energy” charge of its time. In 1912, during the historic election that split the Republican Party and handed the presidency to Woodrow Wilson and the Democrats (sound familiar?), Roosevelt (who would run as a Progressive “Bull Moose” Republican) showed up at the contested convention wearing a sombrero, smoking a cigar and calling President William Howard Taft a “rat in a corner.” (Unmanned, Taft could only respond weakly that Roosevelt was “neurotic.”) During that same fractious election, after letters came to light that seemed to indicate that the married Woodrow Wilson might be having an affair, Teddy sneered: “It wouldn’t work. You can’t cast a man as Romeo who looks and acts so much like an apothecary clerk.” Speaking of affairs, interestingly enough, no one has accused an opponent of actual bad behavior this year; perhaps that will have to wait for the general election. Being president can really louse up your extracurricular love life, though. Kennedy passed a plaintive note to an aide during his 1960 campaign against Richard Nixon, wondering: “I suppose if I win—my poon days are over?” They weren’t for Kennedy, as we know, but he was the last president to get away with it. But wait. Social scientists have discovered that physically posturing like an alpha—spreading one’s arms or legs wide, growling aggressively—can increase testosterone and lower cortisol (the stress hormone) in both men and women. Donald Trump is a master at posturing of this sort, hinting at his virility in ways that make people feel more than a little squirmy, particularly when it comes to his daughter Ivanka, whom he has suggested, jokingly of course, that he would like to date. Perhaps Trump is on to something. Raise your testosterone, lower your stress, and you get the benefits of an affair without complications that could ruin your chance at the presidency. If we have a contested Republican convention this summer, Trump may come swaggering in pumped up on naturally occurring testosterone from a primary trail littered with crude jokes. If that’s the case, move over Teddy Roosevelt—we may be able to crown a new king of emasculation before you can say, “Little Marco.” Of course, with Hillary Clinton as the likely Democratic nominee, if Trump makes it to the general election, he’ll be faced with an entirely new problem in presidential politics: How do you “unman” a woman? So far, for Trump, the answer to that question has been a two-step process. He insinuates Clinton is more manly than feminine: He’s made sarcastic remarks about her “pantsuits,” hinted that she’s a lesbian and remarked on the length of her “disgusting” bathroom break during the Democratic debate in December. And then, with comments like “She goes home, goes to sleep. I’m telling you. She doesn’t have the strength. She doesn’t have the stamina,” he unmans her.
/// Writes commands without additional arguments. fn write_command(&mut self, cmd: Command) -> Result<(), Error<E>> { let (command, delay, allowed_if_running) = cmd.as_tuple(); if !allowed_if_running && self.is_running { return Err(Error::NotAllowed); } i2c::write_command(&mut self.i2c, SCD4X_I2C_ADDRESS, command).map_err(Error::I2c)?; self.delay.delay_ms(delay); Ok(()) }
/** * Created by couchot on 02/10/15. */ public class GestionnaireBaseSommets extends SQLiteOpenHelper { public static final String DATABASE_NAME = "db_sommets"; public static final int DATABASE_VERSION = 1; public static final String TABLE_SOMMET = "sommets"; public static final String ID = "id"; public static final String LONGITUDE = "longitude"; public static final String LATITUDE = "latitude"; public static final String NOM = "nom"; public static final String ALTITUDE = "altitude"; public static boolean instanciee = false; public static boolean initialisee = false; public static GestionnaireBaseSommets gbs = null; public static GestionnaireBaseSommets getGestionnaireBaseSommets(Context context) { if (!instanciee) { gbs = new GestionnaireBaseSommets(context); instanciee = true; } return gbs; } private GestionnaireBaseSommets(Context context) { super(context, DATABASE_NAME, null, DATABASE_VERSION); } @Override public void onCreate(SQLiteDatabase db) { String CREATE_TABLE_SOMMETS = "CREATE TABLE " + TABLE_SOMMET + "(" + ID + " INTEGER PRIMARY KEY," + LONGITUDE + " TEXT," + LATITUDE + " TEXT," + NOM + " TEXT," + ALTITUDE + " INTEGER)"; db.execSQL(CREATE_TABLE_SOMMETS); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("DROP TABLE IF EXISTS " + TABLE_SOMMET); onCreate(db); initialisee = false; } public void initialisationAvecQuelquesSommets() { if (initialisee == false) { String reqInsertion4Sommets[] = { "INSERT INTO sommets VALUES (1116621810, '6.84486789904718', '47.8224908948974', 'Ballon d''Alsace', 1247);", "INSERT INTO sommets VALUES (1661447505, '6.89760629903985', '47.8895027948947', 'Col de Bussang', 731);", "INSERT INTO sommets VALUES (1662823209, '6.91711559903713', '47.9227979948934', 'Col d''Oderen', 884);", "INSERT INTO sommets VALUES (1708009463, '6.92187929903647', '47.9388803948927', 'Les Winterges', 1049);"}; SQLiteDatabase db = this.getWritableDatabase(); for (String s : reqInsertion4Sommets) { db.execSQL(s); } db.close(); // Closing database connection initialisee = true; } } public int combienDeSommets() { String countQuery = "SELECT * FROM " + TABLE_SOMMET; SQLiteDatabase db = this.getReadableDatabase(); Cursor cursor = db.rawQuery(countQuery, null); int c = cursor.getCount(); db.close(); // return count return c; } public void insertinitialisation() { String reqInsertion4Sommets[] = RequeteCreationSommets.Requetes; SQLiteDatabase db = this.getWritableDatabase(); for (String s : reqInsertion4Sommets) { db.execSQL(s); } db.close(); // Closing database connection initialisee = true; } public ArrayList<Sommet> getAll() { ArrayList<Sommet> resultat = new ArrayList<Sommet>(); String countQuery = "SELECT * FROM " + TABLE_SOMMET; SQLiteDatabase db = this.getReadableDatabase(); Cursor cursor = db.rawQuery(countQuery, null); if (cursor.moveToFirst()) { while (cursor.isAfterLast() == false) { // int id = Integer.parseInt(cursor.getString(cursor.getColumnIndex(ID))); float latitude = Float.parseFloat(cursor.getString(cursor.getColumnIndex(LATITUDE))); float longitude = Float.parseFloat(cursor.getString(cursor.getColumnIndex(LONGITUDE))); // float altitude = Float.parseFloat(cursor.getString(cursor.getColumnIndex(ALTITUDE))); String nom = cursor.getString(cursor.getColumnIndex(NOM)); Sommet newSommet = new Sommet(0,latitude,longitude,0,nom); resultat.add(newSommet); cursor.moveToNext(); } } cursor.moveToFirst(); return resultat; } /* public GestionnaireBaseSommets(Context context) { super(context, DATABASE_NAME, null, 
DATABASE_VERSION); mContext = context.getApplicationContext(); } @Override public void onCreate(SQLiteDatabase db) { String CREATE_TABLE_SOMMETS = "CREATE TABLE " + TABLE_SOMMET + "(" + ID + " INTEGER PRIMARY KEY," + LONGITUDE + " TEXT," + LATITUDE + " TEXT," + NOM + " TEXT," + ALTITUDE + " INTEGER)"; db.execSQL(CREATE_TABLE_SOMMETS); RemplissageDesSommetsAsyncTask rst = new RemplissageDesSommetsAsyncTask(this,mContext); rst.execute(RequeteCreationSommets.Requetes); } */ }
/******************************************************************************* * Copyright (c) 2001-2014 Yann-Gaël Guéhéneuc and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Public License v2.0 * which accompanies this distribution, and is available at * http://www.gnu.org/licenses/old-licenses/gpl-2.0.html * * Contributors: * Yann-Gaël Guéhéneuc and others, see in file; API and its implementation ******************************************************************************/ package util.io; import java.io.File; import java.io.FilenameFilter; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.List; /** * @author Yann-Gaël Guéhéneuc * @since 2004/07/21 */ public class Files { public static String getClassPath(final Class<?> aClass) { // Yann 2003/10/26: Relativity! // A user found a major drawback of this class: // Everything is hardcoded and thus prevent install // in different directory than the one specified. // I now use the client class as base for the lookup // of files. Using the client class should give (much) // more flexibility and allow (also) to load project // without specifying the full path. try { // First, I get the client class name and I replace // any '.' by a '/' to get a filesystem-style name. final String clientClassName = aClass.getName().replace('.', Files.getSeparatorChar()); // Then, I built a buffer that shall contain the full // path of the client class. final StringBuffer clientClassBuffer = new StringBuffer(clientClassName.length() + 6); clientClassBuffer.append(clientClassName); clientClassBuffer.append(".class"); // Then, I do many things: // - I get the client class as a resource using itself // (of course, it shall always exist! :-); // - I decode the corresponding URL to get a filesystem // file name; // - I get the absolute path of the client class file. try { final String clientClassAbsolutePath = new File(URLDecoder.decode(aClass .getClassLoader() .getResource(clientClassBuffer.toString()) .getFile(), "UTF-8")).getAbsolutePath(); // Finally, I remove from the client class file its name // itself to get the path to the client class, from which // I can as usual load other resources. 
return clientClassAbsolutePath.substring( 0, clientClassAbsolutePath.length() - clientClassBuffer.length()); } catch (final NullPointerException npe) { return ""; } } catch (final UnsupportedEncodingException uee) { uee.printStackTrace(ProxyConsole.getInstance().errorOutput()); return ""; } } public static List<String> getRecursivelyFilenamesFromDirectory( final String aPath, final FilenameFilter aFilenameFilter) { final List<String> listOfFiles = new ArrayList<String>(); Files.getRecursivelyFilenamesFromDirectory( aPath, listOfFiles, aFilenameFilter); return listOfFiles; } private static void getRecursivelyFilenamesFromDirectory( final String aPath, final List<String> aListOfIles, final FilenameFilter aFilenameFilter) { final File pathFile = new File(aPath); final String[] subPaths = pathFile.list(); if (subPaths != null) { for (int i = 0; i < subPaths.length; i++) { final String fileName = aPath + '/' + subPaths[i]; final File file = new File(fileName); if (file.isDirectory()) { Files.getRecursivelyFilenamesFromDirectory( fileName, aListOfIles, aFilenameFilter); } else { if (aFilenameFilter.accept(new File(aPath), subPaths[i])) { aListOfIles.add(fileName); } } } } else { throw new RuntimeException( "No subdirectories with expected files in " + aPath); } } public static char getSeparatorChar() { return '/'; } public static String normalizePath(final String aPath) { return aPath.replace('\\', Files.getSeparatorChar()); } public static List<String> getJARFiles(final String aPath) { final List<String> jarFiles = new ArrayList<String>(); Files.getJARFiles0(aPath, jarFiles); return jarFiles; } private static void getJARFiles0(final String aPath, final List<String> someJARFiles) { final File path = new File(aPath); final String[] files = path.list(); for (int i = 0; i < files.length; i++) { final String file = aPath + '/' + files[i]; if (new File(file).isDirectory()) { Files.getJARFiles0(file, someJARFiles); } else if (file.endsWith(".jar")) { someJARFiles.add(file); } } } }
Correlation between Skeletal Maturation and Developmental Stages of Canines and Second Molars among Iranian Population Statement of the Problem: Growth assessment has become an important issue in many medical and dental fields. Determining the stages of dental development and skeletal maturation are essential methods for evaluating growth phases. Purpose: This study aimed to assess the relationship between the developmental stages of maxillary and mandibular canines and second molars, determined by Nolla's method, and skeletal maturation stages, determined by cervical vertebral maturation. In addition, the diagnostic performance of dental developmental stages for identifying growth phases was evaluated. Materials and Method: In this descriptive-analytical study, 201 digital panoramic and lateral cephalometric radiographs of children (8 to 15 years) referred to the Orthodontic Department of Babol Dental School were examined. The stages of dental development were determined by Nolla's method, and the stages of skeletal development were determined by cervical vertebral maturation stages (CVMs). The positive likelihood ratio (LHR+) was used to determine the diagnostic performance for identifying growth phases. Kendall's Tau-b correlation coefficient was used to measure the association between the CVM stages and dental calcification stages. In this study, p ≤ 0.05 was considered significant. Results: The relationship between dental development and skeletal maturation in different teeth was significant (p ≤ 0.05). Kendall's Tau-b correlation coefficient between the stages of dental development and skeletal maturity ranged from 0.578 to 0.634 in girls and from 0.588 to 0.655 in boys. The right second molar in the maxilla presented the highest correlation coefficient. The right and left second molars in the maxilla and the left second molar in the mandible, in stage 5, had the highest LHR+ for identifying the pre-pubertal growth phase. Conclusion: Despite the high correlation coefficient between dental development and skeletal maturity, the LHR+ for determining growth stages exceeded 10 in only a small number of teeth. Introduction Recently, the optimal treatment schedule for achieving the most desirable response with the least chance of treatment failure has been considered by both researchers and clinicians. Studies have shown that successful treatment of skeletal disharmonies depends on initiating treatment at the individual pubertal growth spurt. Knowledge of the stage of maturation is also well established in forensic science and syndrome identification. The concept of biological or physiological maturity arises from the diversity of children's development at the same chronological age. Physiological maturity is determined by different biological indicators of maturity. Sexual maturation, chronological age, dental development, height, weight, and skeletal development are among the maturity indicators most frequently employed to identify different growth stages. There are three reliable indicators of individual skeletal maturity: increase in stature, skeletal maturation of the hand and wrist, and changes in the morphology of the cervical vertebrae. The use of the first two indicators in everyday clinical practice is currently limited. In contrast, the cervical vertebral maturation stages (CVMs) method, which was proposed by Baccetti et al., is widely accepted at present.
CVMs covers all of the important phases of craniofacial growth during adolescence and young adulthood, is applicable to both genders, and entails no additional X-ray exposure beyond the regular lateral cephalometric radiograph. Tooth calcification and tooth eruption are two methods of evaluating tooth development. Tooth eruption is affected by malnutrition, the early loss of deciduous teeth, dental caries, and crowding; hence, it is a variable and discontinuous parameter. In addition, tooth eruption time cannot be applied between 3 and 6 years or after the age of 13; therefore, tooth formation is thought to be a more reliable variable. There are several methods for evaluating dental development by calcification stage. The method most commonly used in clinical training and practice is the one proposed by Nolla et al. This technique can assess the mineralization of each tooth of the maxillary and mandibular arches. Some studies have shown that this method is highly reliable in different populations. Caro and Contreras reported that Nolla's method offers more accurate results than other methods for determining dental age. Björk and Helm also reported that the maximum growth maturation in girls and boys occurs at about 12 and 14 years, respectively. As dentistry moves toward straightforward and time-saving procedures, and since panoramic radiographs are commonly taken in most dental clinics, these radiographs can serve as a suitable alternative to hand-wrist radiography for evaluating individual maturity. The relationship between dental development assessed by the Demirjian method and the CVMs method has already been examined in Iran. Many studies show that Demirjian's dental development method can be clinically useful as a skeletal maturity index, and the calcification of the second mandibular molars and mandibular canines showed the highest correlation with skeletal maturation. This study, using the CVMs method and Nolla's method, was designed to evaluate the relationship between the developmental stages of maxillary and mandibular canines and second molars and skeletal maturation assessed by the morphology of the cervical vertebrae in a population of Iran. Materials and Method In this cross-sectional study, 201 digital panoramic and lateral cephalometric radiographs were examined. The dental developmental stages were estimated according to Nolla's method, which divides dental calcification into ten stages (Figure 1). Skeletal stages were verified by using lateral cephalometric radiographs. These stages were assessed according to the CVMs method suggested by Baccetti et al. In this method, the morphology of the second, third, and fourth cervical vertebrae is analyzed, and the CVM is classified into six stages (Figure 2). The characteristics of the stages are described in Table 1. In this study, the growth phases were divided into pre-pubertal (CVMs 1-CVMs 2), pubertal (CVMs 3-CVMs 4), and post-pubertal (CVMs 5-CVMs 6). SPSS software (v.20) was used for statistical analysis; the ANOVA test was used to compare the mean chronological age at different stages of CVM and tooth development, in general and by gender. Due to the inequality of variances, the Games-Howell post-hoc test was used to compare the mean age between two stages of CVMs. The T-test was used to compare the mean age at different CVMs and dental development stages between girls and boys. Kendall's Tau-b correlation coefficient was used to analyze the correlations between CVMs and chronological age and between CVMs and tooth calcification. Probability values of p≤ 0.05 were considered statistically significant.
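The Kendall's Tau-b statistic referred to above is assumed here to follow its standard tie-corrected definition (the paper does not restate the formula; this is the form implemented in SPSS):

\tau_b = \frac{n_c - n_d}{\sqrt{(n_0 - n_1)(n_0 - n_2)}}, \qquad n_0 = \frac{n(n-1)}{2}, \quad n_1 = \sum_i \frac{t_i(t_i - 1)}{2}, \quad n_2 = \sum_j \frac{u_j(u_j - 1)}{2},

where n_c and n_d are the numbers of concordant and discordant pairs, and t_i and u_j are the sizes of the tied groups in the two rankings (here, CVM stage and dental calcification stage).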
Diagnostic performance was evaluated for identifying the growth phases using LHR+. LHR+ estimates how a given dental maturation stage changes the odds of being in a certain CVMs: an LHR+ greater than 1 indicates that the test result is associated with that CVMs, while an LHR+ less than 0.1 makes that CVMs almost impossible. An LHR+ of 10 or more was considered to indicate satisfactory reliability of a dental development stage for identifying a growth phase; therefore, in this study, an LHR+ of 10 or more was required. (Table 1 characterizes each CVM stage by the appearance of the lower borders of C2, C3, and C4 and by the shape of the C3 body.) Results In this study, 201 digital panoramic and lateral cephalometric radiographs of patients aged 8 to 15 years were examined, of whom 112 were girls (55%, mean age 10.19) and 89 were boys (45%, mean age 10.43). The lowest and highest frequencies were in CVMs2 and CVMs3, and the minimum and maximum ages were 8 and 15 years, respectively. Table 2 shows the frequency of calcification of maxillary and mandibular canines and second molars at each CVMs. Kendall's Tau-b correlation coefficient between chronological age and CVMs was 0.513 in general, 0.547 in girls, and 0.517 in boys, which was statistically significant (p≤ 0.05). The mean chronological age was significantly different in all stages of CVM (p≤ 0.05). The mean age difference between two stages of CVM was generally statistically significant (p≤ 0.05); however, there was no significant difference between CVMs4 and CVMs5 in girls, or between CVMs2 and CVMs3 and between CVMs4 and CVMs5 in boys (p> 0.05). The relationship between dental development stages and mean chronological age, in general and by gender, was statistically significant; as the dental development stages in different teeth increased, the average chronological age increased significantly too (p≤ 0.05). The mean chronological age in each dental stage was higher in boys than in girls. Moreover, the T-test comparing the mean chronological age at different stages of dental development between girls and boys showed that this difference was not significant (p> 0.05). Table 3 shows Kendall's Tau-b correlation coefficients between CVMs and the stages of tooth development in different teeth. In this study, teeth 13, 23, and 43 in stage 10 of dental development had an LHR+ above 10 for detecting the post-pubertal phase. Teeth 17, 27, and 47 in stage 5 of dental development had an LHR+ above 10 for detecting the pre-pubertal phase. Table 4 presents the sensitivity, specificity, positive predictive value (PPV), and LHR+ for the teeth with an LHR+ greater than 10. Discussion Identifying the rapid growth period is highly relevant to skeletal treatment in orthodontic patients. Assessment of growth phases is also an integral part of the diagnosis and treatment program for children. Over the years, various methods have been developed to determine growth phases, including wrist radiography, elbow radiography, and cervical vertebral morphology. In this study, there was a relatively high correlation coefficient between chronological age and skeletal maturity, both in general and by gender, being higher in female participants. These results were in agreement with the studies of Abesi et al. and Baidas et al., while in the studies of Alkhal et al., Uysal et al., and Stiehl et al., relatively lower correlations between chronological age and CVMs were reported for boys and girls.
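For reference, the positive likelihood ratio reported in Table 4 is assumed to be computed from sensitivity and specificity in the usual way (the paper lists the values but not the formula):

\mathrm{LHR}^{+} = \frac{\text{sensitivity}}{1 - \text{specificity}}

so, for example, reaching the LHR+ ≥ 10 threshold with a specificity of 0.95 requires a sensitivity of at least 0.50.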
The differences in the results of different studies can be due to differences in race, geographical environment, selected age groups, sample size, and sample selection method. In this study, the high correlation between chronological age and the CVMs showed that CVMs increased as chronological age increased; however, the wide variation in chronological age at different stages of cervical vertebral maturation showed that chronological age is not an accurate indicator for determining maturity stages. These results were in agreement with the studies of Abesi et al., Baidas et al., and Alkhal et al. Also, studies by Baidas et al., Alkhal et al., and Baccetti et al. showed that chronological age is not a reliable indicator for assessing maturity stages. The mean age difference between two stages of cervical maturation was generally significant (p≤ 0.05), but there was no significant difference between CVMs4 and CVMs5 in girls, or between CVMs2 and CVMs3 and between CVMs4 and CVMs5 in boys (p> 0.05). The mean chronological age at each CVMs was higher in boys than in girls, but this difference was not significant at any CVMs. As reported in the studies of Baccetti et al., Abesi et al., Baidas et al., and Nemati et al., in the present study girls were ahead of boys in skeletal maturity, and skeletal maturation in girls had begun earlier. In this study, dental development in girls occurred earlier than in boys, which was in agreement with the studies of Hägg and Taranger and of Fishman. In addition, as in the studies of Nolla et al. and Sachan et al., this difference was not significant at any of the stages of dental development (p> 0.05). In this study, there was a significant relationship between the stages of dental development and skeletal maturation; the correlation coefficient was higher in boys than in girls. The present study showed that, despite the correlation between the stages of dental development and the maturation of the cervical vertebrae, the overall diagnostic performance of using tooth calcification stages to identify growth stages in many cases has a low LHR+. In this study, teeth 13, 23, and 43 in stage 10 of dental development had an LHR+ above 10 for detecting the post-pubertal phase. Teeth 17, 27, and 47 in stage 5 of dental development had an LHR+ above 10 for detecting the pre-pubertal phase. In a study conducted by Kamal et al., tooth 33 in stages 9 and 10 of dental development indicated CVMs2 and CVMs3, and tooth 37 in dental development stages 8-10 indicated CVMs3. In a study conducted by Al-Balbeesi et al., tooth 33 in stages 9 and 10 of dental development represented the pubertal phase. In the study of Günen Yılmaz et al., tooth 37 in stages 7-8 of dental development represented the pre-pubertal phase, and stage 10 of dental development represented the post-pubertal phase. In a study conducted by Sachan et al., tooth 33 in the stage between 8 and 9 of dental development indicated the early stages of developmental maturation. The differing results of these studies may be related to different methods of data collection, different methods of determining the stages of skeletal or dental maturation, geographical environment, selected age groups, racial differences, and sample size. There were some limitations in the present study which should be considered when interpreting the results. The radiographs were taken with different devices, which may affect the quality of the radiographs and the diagnosis of developmental stages.
On the other hand, the study samples were predominantly from one province of Iran, so the results cannot be easily generalized to the Iranian population. Thus, further extensive studies with larger samples drawn from all provinces of Iran are recommended to confirm the present results. Conclusion This study showed that, in this population, the relationship between tooth calcification assessed by Nolla's method and the maturation of the cervical vertebrae was significant. Tooth 17 presented the highest correlation in both genders. Despite the high correlation coefficient between the dental calcification stages and the CVMs, the diagnostic ability of tooth calcification stages to identify growth phases is limited. Although stage 10 of teeth 13, 23, and 43 had a satisfactory diagnostic value for predicting the post-pubertal phase, and stage 5 of teeth 17, 27, and 47 had a satisfactory diagnostic value for predicting the pre-pubertal phase, the calcification stages were not reliable for detecting the pubertal phase.
import { serializeOverflow } from './core/overflow' import { serializeDisplay } from './core/display' import { serializeWidth } from './core/width' import { serializeHeight } from './core/height' import { serializeBackgroundColor } from './core/background-color' import { serializeBackgroundOrigin } from './core/background-origin' import { serializeTop, serializeRight, serializeBottom, serializeLeft } from './core/box-offsets' import { serializeBoxSizingValue } from './core/box-sizing' import { serializeColor } from './core/color' import { serializeMaxHeight } from './core/max-height' import { serializeMaxWidth } from './core/max-width' import { serializeMinHeightValue } from './core/min-height' import { serializeMinWidth } from './core/min-width' import { serializeOpacity } from './core/opacity' import { serializeJustifyContent } from './core/justify-content' import { serializeBorderColor, serializeBorderTopColor, serializeBorderRightColor, serializeBorderBottomColor, serializeBorderLeftColor, } from './core/border-color' import { serializeCursor } from './core/cursor' import { serializeBorderTopRightRadius, serializeBorderBottomRightRadius, serializeBorderBottomLeftRadius, serializeBorderTopLeftRadius, serializeBorderRadius, } from './core/border-radius' import { serializeBorderTopStyle, serializeBorderRightStyle, serializeBorderBottomStyle, serializeBorderLeftStyle, serializeBorderStyle, } from './core/border-style' import { serializeBorderTopWidth, serializeBorderRightWidth, serializeBorderBottomWidth, serializeBorderLeftWidth, serializeBorderWidth, } from './core/border-width' //import { serializeTextDecorationValue } from './core/text-decoration' import { serializeBorderTop, serializeBorderRight, serializeBorderBottom, serializeBorderLeft, serializeBorder, } from './core/border' import { serializeFontSize } from './core/font-size' import { serializeFontFamily } from './core/font-family' import { serializeFlexBasis } from './core/flexbox/flex-basis' import { serializeFlexDirectionValue } from './core/flexbox/flex-direction' import { serializeFlexGrow } from './core/flexbox/flex-grow' import { serializeFlexShrink } from './core/flexbox/flex-shrink' import { serializeFlexWrap } from './core/flexbox/flex-wrap' import { serializeFlex } from './core/flexbox/flex' import { serializeLetterSpacing } from './core/letter-spacing' import { serializeLineHeight } from './core/line-height' import { serializeMarginTopValue, serializeMarginRightValue, serializeMarginBottomValue, serializeMarginLeftValue, serializeMargin, serializeMarginX, serializeMarginY, } from './core/margin' import { serializePaddingTopValue, serializePaddingRightValue, serializePaddingBottomValue, serializePaddingLeftValue, serializePadding, serializePaddingX, serializePaddingY, } from './core/padding' import { serializeTransitionProperty } from './core/transition-property' import { serializeTransitionDelay } from './core/transition-delay' import { serializeTransitionTimingFunction } from './core/transition-timing-function' import { serializeTransition } from './core/transition' import { serializeTransitionDuration } from './core/transition-duration' import { serializeBoxShadow } from './core/box-shadow' import { serializeAlignItems } from './core/align-items' import { serializeTransform } from './core/transform' import { serializeTransformOrigin } from './core/transform-origin' import { serializePerspectiveOrigin } from './core/perspective-origin' import { serializePerspectivePropertyValue } from './core/perspective' import { 
serializeFontStretch } from './core/font-stretch' import { serializeFontSizeAdjust } from './core/font-size-adjust' import { serializeFontVariationSettings } from './core/font-variation-settings' import { serializeFontSynthesis } from './core/font-synthesis' import { serializeAlignContent } from './core/align-content' import { serializeFontVariantLigatures } from './core/font-variant-ligatures' import { serializeBackgroundImage } from './core/background-image' import { serializeBackgroundPosition } from './core/background-position' import { serializeWordSpacing } from './core/word-spacing' import { serializeAlignSelf } from './core/align-self' import { serializeBackgroundSize } from './core/background-size' import { serializeBackfaceVisibility } from './core/backface-visibility' import { serializeAnimationName } from './core/animation-name' import { serializeAnimationDuration } from './core/animation-duration' import { serializeAnimationTimingFunction } from './core/animation-timing-function' import { serializeAnimationDelay } from './core/animation-delay' import { serializeAnimationIterationCount } from './core/animation-iteration-count' import { serializeAnimationDirection } from './core/animation-direction' import { serializeAnimationPlayState } from './core/animation-play-state' import { serializeAnimationFillMode } from './core/animation-fill-mode' import { serializeAnimation } from './core/animation' import { serializeWillChange } from './core/will-change' import { serializeVolume } from './core/volume' import { serializeVerticalAlign } from './core/vertical-align' import { serializeTextUnderlinePosition } from './core/text-underline-position' import { serializeTextUnderlineOffset } from './core/text-underline-offset' import { serializeVoiceVolume } from './core/voice-volume' import { serializeVoiceRate } from './core/voice-rate' import { serializeTextTransform } from './core/text-transform' import { serializeTextEmphasisColor } from './core/text-emphasis-color' import { serializeTextEmphasisSkip } from './core/text-emphasis-skip' import { serializeTextEmphasisPosition } from './core/text-emphasis-position' import { serializeTexIndent } from './core/text-indent' import { serializeOutlineColor } from './core/outline-color' import { serializeOutlineWidth } from './core/outline-width' import { serializeOutline } from './core/outline' import { serializeFontKerning } from './core/font-kerning' import { serializeFontOpticalSizing } from './core/font-optical-sizing' import { serializeFontStyle } from './core/font-style' import { serializejustifySelf } from './core/justify-self' import { serializejustifyItems } from './core/justify-items' import { serializeBorderImageSource } from './core/border-image-source' import { serializeBorderImageSlice } from './core/border-image-slice' import { serializeBorderImageWidth } from './core/border-image-width' import { serializeBorderImageOutset } from './core/border-image-outset' import { serializeBorderImageRepeat } from './core/border-image-repeat' import { serializeBorderCollapse } from './core/border-collapse' import { serializeBorderSpacing } from './core/border-spacing' import { serializeBorderImage } from './core/border-image' import { serializeBreakBefore } from './core/break-before' import { serializeBreakAfter } from './core/break-after' import { serializeBreakInside } from './core/break-inside' import { serializeListStyleImage } from './core/list-style-image' import { serializeListStylePosition } from './core/list-style-position' import { 
serializeListStyleType } from './core/list-style-type' import { serializeListStyle } from './core/list-style' import { serializebackgroundClip } from './core/background-clip' import { serializeBackgroundRepeat } from './core/background-repeat' export const funcMap: any = (type: 'inline' | 'css') => ({ alignContent: serializeAlignContent(type), alignItems: serializeAlignItems(type), animation: serializeAnimation, animationDuration: serializeAnimationDuration(type), animationTimingFunction: serializeAnimationTimingFunction(type), animationDelay: serializeAnimationDelay(type), animationIterationCount: serializeAnimationIterationCount(type), animationDirection: serializeAnimationDirection(type), animationPlayState: serializeAnimationPlayState(type), animationFillMode: serializeAnimationFillMode(type), borderTopColor: serializeBorderTopColor(type), borderRightColor: serializeBorderRightColor(type), borderBottomColor: serializeBorderBottomColor(type), borderLeftColor: serializeBorderLeftColor(type), borderColor: serializeBorderColor(type), borderTopRightRadius: serializeBorderTopRightRadius(type), borderBottomRightRadius: serializeBorderBottomRightRadius(type), borderBottomLeftRadius: serializeBorderBottomLeftRadius(type), borderTopLeftRadius: serializeBorderTopLeftRadius(type), borderRadius: serializeBorderRadius(type), borderTop: serializeBorderTop(type), borderRight: serializeBorderRight(type), borderBottom: serializeBorderBottom(type), borderLeft: serializeBorderLeft(type), border: serializeBorder(type), borderTopStyle: serializeBorderTopStyle(type), borderRightStyle: serializeBorderRightStyle(type), borderBottomStyle: serializeBorderBottomStyle(type), borderLeftStyle: serializeBorderLeftStyle(type), borderStyle: serializeBorderStyle(type), borderSpacing: serializeBorderSpacing(type), borderImage: serializeBorderImage(type), borderTopWidth: serializeBorderTopWidth(type), borderRightWidth: serializeBorderRightWidth(type), borderBottomWidth: serializeBorderBottomWidth(type), borderLeftWidth: serializeBorderLeftWidth(type), borderWidth: serializeBorderWidth(type), borderImageSource: serializeBorderImageSource(type), borderImageSlice: serializeBorderImageSlice(type), borderImageWidth: serializeBorderImageWidth(type), borderImageOutset: serializeBorderImageOutset(type), borderImageRepeat: serializeBorderImageRepeat(type), borderCollapse: serializeBorderCollapse(type), backgroundColor: serializeBackgroundColor(type), backgroundImage: serializeBackgroundImage(type), backgroundPosition: serializeBackgroundPosition(type), boxShadow: serializeBoxShadow(type), breakBefore: serializeBreakBefore, breakAfter: serializeBreakAfter, breakInside: serializeBreakInside, cursor: serializeCursor, display: serializeDisplay, width: serializeWidth, height: serializeHeight, fontStyle: serializeFontStyle(type), fontKerning: serializeFontKerning(type), fontOpticalSizing: serializeFontOpticalSizing(type), listStyleImage: serializeListStyleImage, listStylePosition: serializeListStylePosition, listStyleType: serializeListStyleType, listStyle: serializeListStyle, minHeight: serializeMinHeightValue, maxHeight: serializeMaxHeight, minWidth: serializeMinWidth, maxWidth: serializeMaxWidth, color: serializeColor, top: serializeTop, right: serializeRight, bottom: serializeBottom, left: serializeLeft, flexBasis: serializeFlexBasis(type), flexDirection: serializeFlexDirectionValue(type), flexGrow: serializeFlexGrow(type), flexShrink: serializeFlexShrink(type), boxSizing: serializeBoxSizingValue, opacity: serializeOpacity, 
overflow: serializeOverflow, //textDecoration: serializeTextDecorationValue, justifyContent: serializeJustifyContent(type), justifySelf: serializejustifySelf(type), justifyItems: serializejustifyItems(type), textTransform: serializeTextTransform, textEmphasisColor: serializeTextEmphasisColor, textEmphasisSkip: serializeTextEmphasisSkip, textEmphasisPosition: serializeTextEmphasisPosition, textIndent: serializeTexIndent, fontSize: serializeFontSize(type), fontStretch: serializeFontStretch(type), fontSizeAdjust: serializeFontSizeAdjust(type), fontFamily: serializeFontFamily(type), fontVariationSettings: serializeFontVariationSettings(type), fontSynthesis: serializeFontSynthesis(type), flexWrap: serializeFlexWrap(type), flex: serializeFlex, letterSpacing: serializeLetterSpacing(type), lineHeight: serializeLineHeight(type), marginTop: serializeMarginTopValue(type), marginRight: serializeMarginRightValue(type), marginBottom: serializeMarginBottomValue(type), marginLeft: serializeMarginLeftValue(type), margin: serializeMargin, marginX: serializeMarginX, marginY: serializeMarginY, paddingTop: serializePaddingTopValue, paddingRight: serializePaddingRightValue, paddingBottom: serializePaddingBottomValue, paddingLeft: serializePaddingLeftValue, padding: serializePadding, paddingX: serializePaddingX, paddingY: serializePaddingY, transitionProperty: serializeTransitionProperty, transitionDelay: serializeTransitionDelay, transitionTimingFunction: serializeTransitionTimingFunction, transition: serializeTransition, transitionDuration: serializeTransitionDuration, animationName: serializeAnimationName(type), transform: serializeTransform, transformOrigin: serializeTransformOrigin, perspectiveOrigin: serializePerspectiveOrigin, perspective: serializePerspectivePropertyValue, fontVariantLigatures: serializeFontVariantLigatures(type), wordSpacing: serializeWordSpacing, alignSelf: serializeAlignSelf(type), backgroundSize: serializeBackgroundSize(type), backgroundOrigin: serializeBackgroundOrigin, backgroundClip: serializebackgroundClip, backgroundRepeat: serializeBackgroundRepeat, backfaceVisibility: serializeBackfaceVisibility(type), willChange: serializeWillChange, volume: serializeVolume, verticalAlign: serializeVerticalAlign, textUnderlinePosition: serializeTextUnderlinePosition, textUnderlineOffset: serializeTextUnderlineOffset, voiceVolume: serializeVoiceVolume, voiceRate: serializeVoiceRate, outlineColor: serializeOutlineColor, outlineWidth: serializeOutlineWidth, outline: serializeOutline, })
/** * Metric assessor to assess the {@link FeedOnTimeArrivalMetric} */ public class FeedOnTimeArrivalMetricAssessor implements MetricAssessor<FeedOnTimeArrivalMetric, Serializable> { private static final Logger LOG = LoggerFactory.getLogger(FeedOnTimeArrivalMetricAssessor.class); @Inject private OpsManagerFeedProvider feedProvider; @Inject private MetadataAccess metadataAccess; /* (non-Javadoc) * @see com.thinkbiganalytics.metadata.sla.spi.MetricAssessor#accepts(com.thinkbiganalytics.metadata.sla.api.Metric) */ @Override public boolean accepts(Metric metric) { return metric instanceof FeedOnTimeArrivalMetric; } /* (non-Javadoc) * @see com.thinkbiganalytics.metadata.sla.spi.MetricAssessor#assess(com.thinkbiganalytics.metadata.sla.api.Metric, com.thinkbiganalytics.metadata.sla.spi.MetricAssessmentBuilder) */ @Override @SuppressWarnings("unchecked") public void assess(FeedOnTimeArrivalMetric metric, MetricAssessmentBuilder builder) { LOG.debug("Assessing metric: ", metric); builder.metric(metric); String feedName = metric.getFeedName(); DateTime lastFeedTime = feedProvider.getLastActiveTimeStamp(feedName); HashMap<String,String> data = new HashMap<>(); data.put("feed",feedName); Long nowDiff = 0L; Period nowDiffPeriod = new Period(nowDiff.longValue()); if(lastFeedTime != null) { nowDiff = DateTime.now().getMillis() - lastFeedTime.getMillis(); nowDiffPeriod = new Period(nowDiff.longValue()); } Long latePeriodMillis = metric.getLatePeriod().toStandardDuration().getMillis(); Long duration = CronExpressionUtil.getCronInterval(metric.getExpectedExpression()); Period acceptedPeriod = new Period(duration + latePeriodMillis); Date expectedDate = CronExpressionUtil.getPreviousFireTime(metric.getExpectedExpression()); DateTime expectedTime = new DateTime(expectedDate); LOG.debug("Calculated the Expected Date to be {} ", expectedTime); DateTime lateTime = expectedTime.plus(metric.getLatePeriod()); LOG.debug("CurrentTime is: {}. Comparing {} against the lateTime of {} ", DateTime.now(), lastFeedTime, lateTime); builder.compareWith(expectedDate, feedName); data.put("expectedTime",expectedTime.toString()); data.put("expectedTimeMillis",expectedTime.getMillis()+""); data.put("lateTime",expectedTime.toString()); data.put("lateTimeMillis",expectedTime.getMillis()+""); if(lastFeedTime != null ){ data.put("lastFeedTime",lastFeedTime.toString()); data.put("lastFeedTimeMillis",lastFeedTime.getMillis()+""); } builder.data(data); if (lastFeedTime == null) { LOG.debug("Feed with the specified name {} not found", feedName); builder.message("Feed with the specified name " + feedName + " not found ") .result(AssessmentResult.WARNING); } else if (lastFeedTime.isAfter(expectedTime) && lastFeedTime.isBefore(lateTime)) { LOG.debug("Data for feed {} arrived on {}, which was before late time: {}", feedName, lastFeedTime, lateTime); builder.message("Data for feed " + feedName + " arrived on " + lastFeedTime + ", which was before late time: " + lateTime) .result(AssessmentResult.SUCCESS); } else if(lastFeedTime.isAfter(lateTime)){ LOG.debug("Data for feed {} has not arrived before the late time: {} ", feedName, lateTime); builder.message("Data for feed " + feedName + " has not arrived before the late time: " + lateTime + "\n The last successful feed was on " + lastFeedTime) .result(AssessmentResult.FAILURE); } else if (nowDiff <= (duration + latePeriodMillis)) { LOG.debug("Data for feed {} has arrived before the late time: {}. The last successful feed was on {}. It has been {} since data has arrived. 
The allowed duration is {} ", feedName, lateTime, lastFeedTime, DateTimeUtil.formatPeriod(nowDiffPeriod), DateTimeUtil.formatPeriod(acceptedPeriod)); builder.message("Data for feed " + feedName + " has arrived on time. \n The last successful feed was on " + lastFeedTime + ". It has been " + DateTimeUtil .formatPeriod(nowDiffPeriod) + " since data has arrived. The allowed duration is " + DateTimeUtil.formatPeriod(acceptedPeriod)) .result(AssessmentResult.SUCCESS); } else if (nowDiff > (duration + latePeriodMillis)) { //error its been greater that the duration of the cron + lateTime LOG.debug("Data for feed {} has not arrived before the late time: {}. The last successful feed was on {}. It has been {} since data has arrived. The allowed duration is {} ", feedName, lateTime, lastFeedTime, DateTimeUtil.formatPeriod(nowDiffPeriod), DateTimeUtil.formatPeriod(acceptedPeriod)); builder.message("Data for feed " + feedName + " has not arrived on time. \n The last successful feed was on " + lastFeedTime + ". It has been " + DateTimeUtil .formatPeriod(nowDiffPeriod) + " since data has arrived. The allowed duration is " + DateTimeUtil.formatPeriod(acceptedPeriod)) .result(AssessmentResult.FAILURE); } else if (DateTime.now().isBefore(lateTime)) { //&& lastFeedTime.isBefore(expectedTime) LOG.debug("CurrentTime {} is before the lateTime of {}. Not Assessing", DateTime.now(), lateTime); return; } else { LOG.debug("Data for feed {} has not arrived before the late time: {} ", feedName, lateTime); builder.message("Data for feed " + feedName + " has not arrived before the late time: " + lateTime + "\n The last successful feed was on " + lastFeedTime) .result(AssessmentResult.FAILURE); } } public MetadataAccess getMetadataAccess() { return metadataAccess; } public void setMetadataAccess(MetadataAccess metadataAccess) { this.metadataAccess = metadataAccess; } }
#pragma once
#ifndef _VECTOR3_H_
#define _VECTOR3_H_
#include <string>
#include "Vector2.h"
#define vec3 Vector3
namespace LunarMath
{
	struct Vector3int
	{
	public:
		Vector3int(int NewX, int NewY, int NewZ);
		Vector3int();
		int x, y, z;
		bool operator==(Vector3int other);
		friend bool operator==(const Vector3int& lhs, const Vector3int& rhs);
		bool operator!= (Vector3int other);
	};
	class Vector3
	{
	public:
		std::string ToString() const;
#pragma region VectorCreation + Consts
		Vector3(float f);
		Vector3(float NewX, float NewY, float NewZ);
		Vector3(const Vector2& NewXY, float NewZ);
		Vector3(float NewX, const Vector2& NewYZ);
		Vector3();
		static Vector3 Up();
		static Vector3 Down();
		static Vector3 Forward();
		static Vector3 Backward();
		static Vector3 Right();
		static Vector3 Left();
		static Vector3 Zero();
#pragma endregion
#pragma region Functions
		float Length() const;
		static float Length(const Vector3& v);
		Vector3 Normalized() const;
		Vector3& Normalize();
		static Vector3 Normalized(const Vector3& v);
		static Vector3& Normalize(Vector3& v);
		static float DotProduct(const Vector3& a, const Vector3& b);
		static Vector3 CrossProduct(const Vector3& a, const Vector3& b);
		static Vector3 Lerp(const Vector3& a, const Vector3& b, float t);
#pragma endregion
#pragma region operators
		float operator[](const int& i) const;
		float& operator[](const int& i);
		Vector3& operator=(Vector3 v);
		Vector3 operator+(const Vector3& v) const;
		Vector3 operator-(const Vector3& v) const;
		Vector3 operator*(float c) const;
		Vector3 operator/(float c) const;
		float operator*(const Vector3& c) const;
		Vector3& operator+=(const Vector3& v);
		Vector3& operator-=(const Vector3& v);
		Vector3& operator*=(float c);
		Vector3& operator/=(float c);
		bool operator==(const Vector3& v) const;
		bool operator!=(const Vector3& v) const;
#pragma endregion
		float x, y, z;
	};
};
#endif
Story highlights Howard Kurtz: Sean Hannity-Keith Ellison dust-up made for good TV He says Fox host was just doing interview, and Ellison picked fight on spending cuts He says escalation, name-calling a sad reflection of coarsened political discourse Kurtz: Each man may have stood his ground on principle, but viewers lost out When I saw the headlines about Sean Hannity getting into a shouting match with a Democratic congressman this week, I assumed the combative and conservative Fox News host had just gone off on him. I was wrong. Hannity was trying to conduct what we in journalism call an interview. But from the first words out of his mouth, Rep. Keith Ellison came prepared to pick a fight. It was good television, I suppose, but it's hard to say it was enlightening. Howard Kurtz Ellison began insulting Hannity from the get-go, Hannity pushed back, and the pugilistics were under way. After I criticized Ellison in an online video , Fox-bashers, Hannity-haters and others started with the nasty tweets. But it was Ellison, not Hannity, who had started throwing verbal punches. Said one tweet: "Even psychotic liberal Howard Kurtz thinks Keith Ellison was out of line with Sean Hannity." (For the record, I consider myself reasonably sane.) I've never been shy about criticizing Hannity for mangling the facts or spouting the GOP's talking points. But in this case, he wasn't at fault. This wasn't a situation where both sides were engaged in an argument that grew more heated. Why would the Minnesota lawmaker launch a premeditated assault? Perhaps it's good politics for a liberal Democrat to force a confrontation with Hannity. He can brag to his base about beating up on one of Fox's most popular pundits. In fact, at least one liberal group is already trying to raise money off the incident -- another example of how unbridled partisanship can be profitable. Ellison told me: "I stood up to Sean Hannity because of what the sequester will mean for millions of Americans who have already been forced to work harder while they get by with less. If these devastating cuts in the sequester go into effect at the end of this week, three-quarters of a million jobs will be lost. ... "I have constituents in Minnesota who will lose their jobs because Republicans in Congress are unwilling to close loopholes for yacht owners. The president is not being an 'alarmist in chief' or 'President Panic' when he talks about these cuts; he is leading." A person close to Ellison, who would speak only on condition of anonymity, told me that the congressman allowed Hannity to get under his skin because he felt the host had taken President Barack Obama's words out of context. Here's how it got going: Hannity had played a series of brief clips of Obama and began with this highly partisan comment: "If and when the Obama sequester takes effect on Friday, it's because the president is more concerned with fear-mongering than finding a solution to the problem that he himself created." No sooner did he tell Ellison, "Welcome back, sir," than the congressman took, shall we say, vigorous issue with Hannity's opening. "Quite frankly, you are the worst excuse for a journalist I've ever seen," said Ellison, who then accused him of "yellow journalism." Hannity's not a journalist, he's a commentator paid for his conservative opinions, but let's not quibble. Ellison was getting revved up again when Hannity tried to make a factual point. "What the president said was dead-on accurate, and for you to say the president is to blame here is ridiculous. 
I was there August 2011 when the Republicans, your party, which you shamelessly. ..." Hannity interrupted: "I'm not a Republican, let me correct the record." "You are nothing but a Republican," Ellison said. Hannity got out that he is a conservative, not a registered Republican, whereupon Ellison accused him of being "a shill for the Republican Party." Hannity -- who does carry the party's water most of the time -- allowed Ellison to go on for a couple of minutes. Then he tried to ask a question and the congressman repeatedly talked over him. That is pretty much how it went for the rest of the segment, with Ellison at one point calling Hannity "immoral." Ellison accepted the invitation to come on and then showed more interest in name-calling than having a dialogue. The shout fest was, perhaps, a microcosm of what's become of our media and political dialogue. There is so much noise in Washington that those who want to break through sometimes feel compelled to keep boosting the decibel level. Debate is reduced to each side yelling at each other rather than engaging on the issues. How different was the Hannity/Ellison clash than House Speaker John Boehner telling the Senate to "get off their ass" and pass a budget? Obama and the Republicans have kept hurling accusations of bad faith at one another until the automatic budget cuts that both sides once deemed unthinkable loomed. The thinking seems to be: If there's no possibility of compromise, why not just take potshots and try to win the news cycle? Most cable news anchors and political players don't descend to that level. But can anyone deny that the media have contributed to a coarsening of the political culture? Hannity, who finds new reasons to attack Obama every day, is hardly the poster boy for bipartisanship. But in this instance he did invite a politician with liberal views and attempt to be civil. Maybe they both end up benefiting from standing their ground against a fierce opponent. But it's viewers who lost out.
Washington is closely monitoring the relations between Russia and Germany and will do anything to prevent the union between the two countries, founder of the global intelligence company Stratfor, George Friedman, said. © AP Photo / Stringer US Plans to Modernize Nukes in Germany Cause Concern to Russia According to Friedman, the formation of a partnership between Russia and Germany could give an impetus to the strengthening of both countries. However, such a development would pose a threat to US interests, Friedman said in an interview with German newspaper Deutsche Wirtschafts Nachrichten (DWN). In Friedman's opinion, Germany is currently under enormous pressure and is very vulnerable politically and economically. "Europe is in a process of institutional disintegration," the expert said. "Each state is trying to go its own way." According to Friedman, Germany is highly dependent on its export markets and therefore needs cooperation with other countries. However, the choice of partners for the German government is quite limited. Rapprochement with Russia would be particularly favorable for Germany in all respects, but it would cause great resentment from the United States, Friedman argued. According to the analyst, Washington will do anything to prevent the improvement of relations between the two countries. Moreover, there are countries which are not interested in cooperation between Moscow and Berlin, such as Poland and France. "It is not just the United States that wants to prevent an alliance between Germany and Russia. There is not a single country in Europe that would support such an alliance. Poland and France are for example vehement opponents of such a union. An alliance between Germany and Russia would lead to fear and terror in Europe," the expert concluded.
/* * appendFunctionName * Deparses function name from given function oid. */ static void appendFunctionName(Oid funcid, deparse_expr_cxt *context) { StringInfo buf = context->buf; HeapTuple proctup; Form_pg_proc procform; const char *proname; proctup = SearchSysCache1(PROCOID, ObjectIdGetDatum(funcid)); if (!HeapTupleIsValid(proctup)) { elog(ERROR, "cache lookup failed for function %u", funcid); } procform = (Form_pg_proc) GETSTRUCT(proctup); if (procform->pronamespace != PG_CATALOG_NAMESPACE) { const char *schemaname; schemaname = get_namespace_name(procform->pronamespace); appendStringInfo(buf, "%s.", quote_identifier(schemaname)); } proname = NameStr(procform->proname); appendStringInfoString(buf, quote_identifier(proname)); ReleaseSysCache(proctup); }
// Determine whether the string is a IPv6 mask bool IsIpMask6(char *str) { IP mask; if (str == NULL) { return false; } return StrToMask6(&mask, str); }
/* Module imports */ import { TestBed, ComponentFixture } from '@angular/core/testing'; import { IonicModule } from 'ionic-angular'; import { NoopAnimationsModule } from '@angular/platform-browser/animations'; import { By } from '@angular/platform-browser'; import { SimpleChange } from '@angular/core'; /* Test configuration imports */ import { configureTestBed } from '../../../test-config/configureTestBed'; /* Component imports */ import { AccordionComponent } from './accordion'; describe('Accordion Component', () => { let fixture: ComponentFixture<AccordionComponent>; let accordion: AccordionComponent; configureTestBed(); beforeAll(done => (async() => { TestBed.configureTestingModule({ declarations: [ AccordionComponent ], imports: [ IonicModule, NoopAnimationsModule ], providers: [] }); await TestBed.compileComponents(); })() .then(done) .catch(done.fail)); beforeEach(() => { fixture = TestBed.createComponent(AccordionComponent); accordion = fixture.componentInstance; }); test('should create the component', () => { fixture.detectChanges(); expect(accordion).toBeDefined(); }); // end 'should create the component' test test('should toggle expansion', () => { fixture.detectChanges(); const accElem = fixture.debugElement.query(By.css('.expand-accordion')); expect(accElem.properties['@expandUpDown'].value).toMatch('collapsed'); expect(accElem.properties['@expandUpDown'].params.height).toBe(0); accordion.ngOnChanges({ expanded: new SimpleChange(false, true, false) }); fixture.detectChanges(); expect(accElem.properties['@expandUpDown'].value).toMatch('expanded'); accordion.ngOnChanges({ expanded: new SimpleChange(false, false, false) }); fixture.detectChanges(); expect(accElem.properties['@expandUpDown'].value).toMatch('collapsed'); }); // end 'should toggle expansion' test });
def all_operations(): cle_ops = collect_from_pyclesperanto_if_installed() tools_ops = collect_from_tools_menu_if_installed() npe2_ops = collect_from_npe2_if_installed() all_ops = {**cle_ops, **tools_ops, **npe2_ops} return all_ops
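A minimal usage sketch of all_operations(); it assumes the collector helpers above are importable from the same module and that each returns a dict (an uninstalled optional backend simply contributes an empty dict):

# Hypothetical usage: report how many operations were discovered and list a few of them.
ops = all_operations()
print(f"{len(ops)} operations discovered")
for name in sorted(ops)[:5]:
    print(name, "->", ops[name])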
import getRaces from "./races";
import getArmors from "./armors";
import getMounts from "./mounts";
import getWeapons from "./weapons";
import getFood from "./food";

const RACES = getRaces();
const ARMORS = getArmors();
const MOUNTS = getMounts();
const WEAPONS = getWeapons();
const FOOD = getFood();

export { RACES, ARMORS, MOUNTS, WEAPONS, FOOD };
/** * If nodeName contains spark and hdinsight, we just think it is a spark node. * So set the service name to hdinsight * @param serviceName * @return */ private String transformHDInsight(String serviceName, Node node) { try { if (serviceName.equals(TelemetryConstants.ACTION)) { String nodeName = node.getName().toLowerCase(); if (nodeName.contains("spark") || nodeName.contains("hdinsight")) { return TelemetryConstants.HDINSIGHT; } if (node.getParent() != null) { String parentName = node.getParent().getName().toLowerCase(); if (parentName.contains("spark") || parentName.contains("hdinsight")) { return TelemetryConstants.HDINSIGHT; } } } return serviceName; } catch (Exception ignore) { } return serviceName; }
# Copyright (c) Meta Platforms, Inc. and affiliates. # All rights reserved. # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. import torch from torch import Tensor, nn import math from typing import Tuple, Type from .common import MLPBlock class TwoWayTransformer(nn.Module): def __init__( self, depth: int, embedding_dim: int, num_heads: int, mlp_dim: int, activation: Type[nn.Module] = nn.ReLU, attention_downsample_rate: int = 2, ) -> None: """ A transformer decoder that attends to an input image using queries whose positional embedding is supplied. Args: depth (int): number of layers in the transformer embedding_dim (int): the channel dimension for the input embeddings num_heads (int): the number of heads for multihead attention. Must divide embedding_dim mlp_dim (int): the channel dimension internal to the MLP block activation (nn.Module): the activation to use in the MLP block """ super().__init__() self.depth = depth self.embedding_dim = embedding_dim self.num_heads = num_heads self.mlp_dim = mlp_dim self.layers = nn.ModuleList() for i in range(depth): self.layers.append( TwoWayAttentionBlock( embedding_dim=embedding_dim, num_heads=num_heads, mlp_dim=mlp_dim, activation=activation, attention_downsample_rate=attention_downsample_rate, skip_first_layer_pe=(i == 0), ) ) self.final_attn_token_to_image = Attention( embedding_dim, num_heads, downsample_rate=attention_downsample_rate ) self.norm_final_attn = nn.LayerNorm(embedding_dim) def forward( self, image_embedding: Tensor, image_pe: Tensor, point_embedding: Tensor, ) -> Tuple[Tensor, Tensor]: """ Args: image_embedding (torch.Tensor): image to attend to. Should be shape B x embedding_dim x h x w for any h and w. image_pe (torch.Tensor): the positional encoding to add to the image. Must have the same shape as image_embedding. point_embedding (torch.Tensor): the embedding to add to the query points. Must have shape B x N_points x embedding_dim for any N_points. Returns: torch.Tensor: the processed point_embedding torch.Tensor: the processed image_embedding """ # BxCxHxW -> BxHWxC == B x N_image_tokens x C bs, c, h, w = image_embedding.shape image_embedding = image_embedding.flatten(2).permute(0, 2, 1) image_pe = image_pe.flatten(2).permute(0, 2, 1) # Prepare queries queries = point_embedding keys = image_embedding # Apply transformer blocks and final layernorm for layer in self.layers: queries, keys = layer( queries=queries, keys=keys, query_pe=point_embedding, key_pe=image_pe, ) # Apply the final attenion layer from the points to the image q = queries + point_embedding k = keys + image_pe attn_out = self.final_attn_token_to_image(q=q, k=k, v=keys) queries = queries + attn_out queries = self.norm_final_attn(queries) return queries, keys class TwoWayAttentionBlock(nn.Module): def __init__( self, embedding_dim: int, num_heads: int, mlp_dim: int = 2048, activation: Type[nn.Module] = nn.ReLU, attention_downsample_rate: int = 2, skip_first_layer_pe: bool = False, ) -> None: """ A transformer block with four layers: (1) self-attention of sparse inputs, (2) cross attention of sparse inputs to dense inputs, (3) mlp block on sparse inputs, and (4) cross attention of dense inputs to sparse inputs. 
Arguments: embedding_dim (int): the channel dimension of the embeddings num_heads (int): the number of heads in the attention layers mlp_dim (int): the hidden dimension of the mlp block activation (nn.Module): the activation of the mlp block skip_first_layer_pe (bool): skip the PE on the first layer """ super().__init__() self.self_attn = Attention(embedding_dim, num_heads) self.norm1 = nn.LayerNorm(embedding_dim) self.cross_attn_token_to_image = Attention( embedding_dim, num_heads, downsample_rate=attention_downsample_rate ) self.norm2 = nn.LayerNorm(embedding_dim) self.mlp = MLPBlock(embedding_dim, mlp_dim, activation) self.norm3 = nn.LayerNorm(embedding_dim) self.norm4 = nn.LayerNorm(embedding_dim) self.cross_attn_image_to_token = Attention( embedding_dim, num_heads, downsample_rate=attention_downsample_rate ) self.skip_first_layer_pe = skip_first_layer_pe def forward( self, queries: Tensor, keys: Tensor, query_pe: Tensor, key_pe: Tensor ) -> Tuple[Tensor, Tensor]: # Self attention block if self.skip_first_layer_pe: queries = self.self_attn(q=queries, k=queries, v=queries) else: q = queries + query_pe attn_out = self.self_attn(q=q, k=q, v=queries) queries = queries + attn_out queries = self.norm1(queries) # Cross attention block, tokens attending to image embedding q = queries + query_pe k = keys + key_pe attn_out = self.cross_attn_token_to_image(q=q, k=k, v=keys) queries = queries + attn_out queries = self.norm2(queries) # MLP block mlp_out = self.mlp(queries) queries = queries + mlp_out queries = self.norm3(queries) # Cross attention block, image embedding attending to tokens q = queries + query_pe k = keys + key_pe attn_out = self.cross_attn_image_to_token(q=k, k=q, v=queries) keys = keys + attn_out keys = self.norm4(keys) return queries, keys class Attention(nn.Module): """ An attention layer that allows for downscaling the size of the embedding after projection to queries, keys, and values. """ def __init__( self, embedding_dim: int, num_heads: int, downsample_rate: int = 1, ) -> None: super().__init__() self.embedding_dim = embedding_dim self.internal_dim = embedding_dim // downsample_rate self.num_heads = num_heads assert self.internal_dim % num_heads == 0, "num_heads must divide embedding_dim." self.q_proj = nn.Linear(embedding_dim, self.internal_dim) self.k_proj = nn.Linear(embedding_dim, self.internal_dim) self.v_proj = nn.Linear(embedding_dim, self.internal_dim) self.out_proj = nn.Linear(self.internal_dim, embedding_dim) def _separate_heads(self, x: Tensor, num_heads: int) -> Tensor: b, n, c = x.shape x = x.reshape(b, n, num_heads, c // num_heads) return x.transpose(1, 2) # B x N_heads x N_tokens x C_per_head def _recombine_heads(self, x: Tensor) -> Tensor: b, n_heads, n_tokens, c_per_head = x.shape x = x.transpose(1, 2) return x.reshape(b, n_tokens, n_heads * c_per_head) # B x N_tokens x C def forward(self, q: Tensor, k: Tensor, v: Tensor) -> Tensor: # Input projections q = self.q_proj(q) k = self.k_proj(k) v = self.v_proj(v) # Separate into heads q = self._separate_heads(q, self.num_heads) k = self._separate_heads(k, self.num_heads) v = self._separate_heads(v, self.num_heads) # Attention _, _, _, c_per_head = q.shape attn = q @ k.permute(0, 1, 3, 2) # B x N_heads x N_tokens x N_tokens attn = attn / math.sqrt(c_per_head) attn = torch.softmax(attn, dim=-1) # Get output out = attn @ v out = self._recombine_heads(out) out = self.out_proj(out) return out
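A small smoke test of the TwoWayTransformer defined above, using random tensors. The sizes are illustrative only and assume the class and its MLPBlock dependency are importable alongside torch; shapes follow the forward() docstring (image embedding B x C x H x W, point embedding B x N_points x C).

import torch

# depth/embedding sizes are arbitrary but satisfy the head-divisibility assertion
# (internal_dim = 256 // 2 = 128, which is divisible by num_heads = 8).
transformer = TwoWayTransformer(depth=2, embedding_dim=256, num_heads=8, mlp_dim=2048)

b, c, h, w = 1, 256, 64, 64
image_embedding = torch.randn(b, c, h, w)
image_pe = torch.randn(b, c, h, w)        # positional encoding, same shape as the image embedding
point_embedding = torch.randn(b, 5, c)    # 5 query tokens

queries, keys = transformer(image_embedding, image_pe, point_embedding)
print(queries.shape)  # torch.Size([1, 5, 256])
print(keys.shape)     # torch.Size([1, 4096, 256]) -- 4096 = 64 * 64 image tokens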
HIV-Related Cardiovascular Disease, Statins, and the REPRIEVE Trial. HIV infection is associated with increased cardiovascular disease (CVD), and increased rates of myocardial infarction and stroke have been observed in HIV-infected individuals. After traditional risk factors that are more common among people living with HIV infection (such as smoking and diabetes) are accounted for, the excess risk for CVD persists. Recent studies suggest that increased immune activation and inflammation may contribute to excess risk for CVD in the context of HIV infection. Imaging studies in the HIV-infected population have found inflamed, noncalcified plaque that is vulnerable to rupture. Statin therapy may represent a potentially useful primary prevention strategy for CVD in HIV-infected individuals, as this class of drugs lowers lipid levels and may simultaneously reduce immune activation and inflammation. REPRIEVE (Randomized Trial to Prevent Vascular Events in HIV) is a large, multicenter study funded by the National Institutes of Health. REPRIEVE will test whether pitavastatin, a newer statin that does not have substantial interactions with antiretroviral drugs, can prevent vascular events over time among HIV-infected individuals who do not have known CVD. This study is now open to enrollment at sites throughout the United States and abroad and will hopefully provide definitive data on this important question.
class RetrieveData: """ Retrieves and stores the weather data """ def __init__(self): self.key: str = settings.DARK_SKY_API_KEY self.latitude: float = 1.2921 self.longitude: float = 36.8219 self.units = 'si' self.data: CurrentData def retrieve_current_data(self)->WeatherData: nairobi: forecast.Forecast = forecast( self.key, self.latitude, self.longitude, units=self.units, ) data = WeatherData( datetime.fromtimestamp(nairobi.currently['time']), nairobi.currently['summary'], nairobi.currently['icon'], float(nairobi.currently['temperature']), float(nairobi.currently['humidity']), float(nairobi.currently['pressure']), float(nairobi.currently['windSpeed']), float(nairobi.currently['precipIntensity']), float(nairobi.currently['uvIndex']), float(nairobi.currently['visibility']) ) return data def retrieve_specific_day_data(self, day: float)->json: nairobi: forecast.Forecast = forecast( self.key, self.latitude, self.longitude, units=self.units, time=day ) data = WeatherData( datetime.fromtimestamp(nairobi.currently['time']), nairobi.currently['summary'], nairobi.currently['icon'], float(nairobi.currently['temperature']), float(nairobi.currently['humidity']), float(nairobi.currently['pressure']), float(nairobi.currently['windSpeed']), float(nairobi.currently['precipIntensity']), float(nairobi.currently['uvIndex']), float(nairobi.currently['visibility']) ) data = data.to_dict() assert isinstance(data, str) return data
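A hypothetical usage sketch for RetrieveData; it assumes a valid Dark Sky API key is available via settings.DARK_SKY_API_KEY and that WeatherData is the result type constructed above.

from datetime import datetime, timedelta

# Fetch the current Nairobi conditions, then the conditions for a past day
# (Dark Sky time-machine requests take a UNIX timestamp).
retriever = RetrieveData()
print(retriever.retrieve_current_data())

one_week_ago = (datetime.now() - timedelta(days=7)).timestamp()
print(retriever.retrieve_specific_day_data(one_week_ago))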
package hackflow

import (
	"bufio"
	"context"
	"fmt"
	"io"
	"net/http"
)

func doGetMoreURL(dict io.Reader, url string, outCh chan interface{}) chan interface{} {
	scanner := bufio.NewScanner(dict)
	for scanner.Scan() {
		outCh <- fmt.Sprintf("%s/%s", url, scanner.Text())
	}
	return outCh
}

// GetMoreURL reads the dictionary and generates more URLs from each base URL.
func GetMoreURL(dict io.Reader, urlCh chan interface{}) chan interface{} {
	outCh := make(chan interface{}, 1024)
	go func() {
		for url := range urlCh {
			doGetMoreURL(dict, url.(string), outCh)
		}
		close(outCh)
	}()
	return outCh
}

var DefaultStatusCodeBlackList string = "400,401,402,403,404,405,500,501,502,503,504"

type BruteForceURLConfig struct {
	BaseURLCh           chan interface{}
	RoutineCount        int
	RandomAgent         bool
	Proxy               string
	StatusCodeBlackList string
	Dictionary          io.Reader
}

type dirSearchGo struct {
	baseTool
}

func NewDirSearchGo(ctx context.Context) *dirSearchGo {
	return &dirSearchGo{
		baseTool{
			ctx: ctx,
		},
	}
}

func (d *dirSearchGo) Run(config *BruteForceURLConfig) (chan *ParsedHttpResp, error) {
	moreURLCh := GetMoreURL(config.Dictionary, config.BaseURLCh)
	requestCh := GenRequest(d.ctx, GenRequestConfig{
		URLCh:       moreURLCh,
		MethodList:  []string{http.MethodGet, http.MethodPost, http.MethodPut},
		RandomAgent: true,
	})
	respCh, err := RetryHttpSend(d.ctx, &RetryHttpSendConfig{
		RequestCh:    requestCh,
		RoutineCount: config.RoutineCount,
		HttpClientConfig: HttpClientConfig{
			Proxy:    config.Proxy,
			Redirect: false,
			Checktry: func(ctx context.Context, resp *http.Response, err error) (bool, error) {
				return false, nil
			},
			RetryMax: 1,
		},
	})
	if err != nil {
		return nil, err
	}
	// Parse the HTTP response messages.
	return ParseHttpResp(d.ctx, &ParseHttpRespConfig{
		RoutineCount: 1000,
		HttpRespCh:   respCh,
	})
}
#ifndef MYMODULE_H
#define MYMODULE_H

//@ require_module mymodule;

//@ predicate mymodule_state(int x, int ctr);

void mymodule_init();
//@ requires module(mymodule, true);
//@ ensures mymodule_state(0, 0);

void mymodule_destroy();
//@ requires mymodule_state(_, _);
//@ ensures module(mymodule, false);

void mymodule_set(int value);
//@ requires mymodule_state(_, ?ctr);
//@ ensures ctr == INT_MAX ? mymodule_state(value, ctr) : mymodule_state(value, ctr + 1);

int mymodule_get();
//@ requires mymodule_state(?x, ?ctr);
//@ ensures mymodule_state(x, ctr) &*& result == x;

int mymodule_get_count();
//@ requires mymodule_state(?x, ?ctr);
//@ ensures mymodule_state(x, ctr) &*& result == ctr;

#endif
import sys


def read_file():
    """Read the hexdump file named on the command line and return its lines
    with trailing newlines stripped."""
    filename = sys.argv[1]
    with open(filename, 'r') as file_input:
        hexdump = file_input.readlines()
    return [line.rstrip('\n') for line in hexdump]
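A small usage sketch, assuming the function above lives in a script that is invoked with the dump file as its first command-line argument (the file name shown is only illustrative):

# Hypothetical entry point; run as:  python read_dump.py sample.hexdump
if __name__ == '__main__':
    for line in read_file():   # read_file() pulls the path from sys.argv[1]
        print(line)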
Design of a precise current source with adjustable frequency for AC-FSM. AC-FSM is a new technology for externally monitoring internal corrosion in oil and gas pipelines. The stability of the current source in the FSM equipment is one of the key factors affecting monitoring accuracy. To meet the equipment requirements, a precision current source with adjustable frequency is designed. With a C8051 MCU as the control core, DAC conversion and a differential amplifier are used to control the frequency and the output current. Experiments show that the output of the excitation power supply is highly consistent, with variation of less than 0.3%. Over temperature changes, the current varies by less than 0.5% at a low frequency of 5 Hz and by less than 0.4% at a high frequency of 102 Hz, which avoids the influence of temperature drift. The equipment is suitable for long-term field operation on oil and gas pipelines.
import unittest

from pystrings.palindrome_pairs import palindrome_pairs


class PalindromePairsTests(unittest.TestCase):
    def test_one(self):
        self.assertEqual(palindrome_pairs(["bat", "tab", "cat"]), [[0, 1], [1, 0]])

    def test_two(self):
        self.assertEqual(
            palindrome_pairs(["dog", "cow", "tap", "god", "pat"]),
            [[0, 3], [2, 4], [3, 0], [4, 2]],
        )

    def test_three(self):
        self.assertEqual(
            palindrome_pairs(["abcd", "dcba", "lls", "s", "sssll"]),
            [[0, 1], [1, 0], [2, 4], [3, 2]],
        )
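For context, a minimal brute-force sketch of a palindrome_pairs function that satisfies the tests above; the actual pystrings.palindrome_pairs implementation may use a faster trie- or hash-based approach, so this O(n^2 * k) version is only illustrative:

def palindrome_pairs(words):
    """Return all index pairs [i, j] (i != j) such that words[i] + words[j]
    reads the same forwards and backwards, in discovery order."""
    pairs = []
    for i, first in enumerate(words):
        for j, second in enumerate(words):
            if i == j:
                continue
            combined = first + second
            if combined == combined[::-1]:   # palindrome check by reversal
                pairs.append([i, j])
    return pairs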
/**
 * Vaadin {@link FormFieldFactory} that builds the species, protease and
 * peptide-sequence fields for the peptide check form. Selecting a species
 * creates (or reuses) a SigPep session and reveals the remaining fields.
 *
 * @author niels
 */
public class PeptideCheckFormFieldFactory implements FormFieldFactory {

    private static final Logger logger = Logger.getLogger(PeptideCheckFormFieldFactory.class);

    private MyVaadinApplication iApplication;
    private FormHelp iFormHelp;
    private Select iSpeciesSelect;
    private Select iProteaseSelect;
    private TextField iPeptideSequenceTextField;
    private boolean iVisible = Boolean.FALSE;

    public PeptideCheckFormFieldFactory(MyVaadinApplication aApplication) {
        iApplication = aApplication;
        iFormHelp = iApplication.getFormHelp();

        //species field
        iSpeciesSelect = new Select("Species");
        iSpeciesSelect.setRequired(Boolean.TRUE);
        iSpeciesSelect.setItemCaptionMode(Select.ITEM_CAPTION_MODE_PROPERTY);
        iSpeciesSelect.setItemCaptionPropertyId("scientificName");
        iSpeciesSelect.setImmediate(Boolean.TRUE);
        iSpeciesSelect.setNullSelectionAllowed(Boolean.FALSE);
        BeanItemContainer<Organism> lOrganismBeanItemContainer = new BeanItemContainer<Organism>(Organism.class);
        lOrganismBeanItemContainer.addAll(getOrganisms());
        iSpeciesSelect.setContainerDataSource(lOrganismBeanItemContainer);
        iFormHelp.addHelpForComponent(iSpeciesSelect, PropertiesConfigurationHolder.getInstance().getString("form_help.species"));

        //protease field
        iProteaseSelect = new Select("Protease");
        iProteaseSelect.setRequired(Boolean.TRUE);
        iProteaseSelect.setNullSelectionAllowed(Boolean.FALSE);
        iProteaseSelect.setImmediate(Boolean.TRUE);
        iFormHelp.addHelpForComponent(iProteaseSelect, PropertiesConfigurationHolder.getInstance().getString("form_help.protease"));

        //peptide field
        iPeptideSequenceTextField = new TextField("Peptide sequence");
        iPeptideSequenceTextField.setRequired(Boolean.TRUE);
        iPeptideSequenceTextField.addValidator(new RegexpValidator("[a-zA-Z]+", PropertiesConfigurationHolder.getInstance().getString("form_validation.peptide_sequence")));
        iFormHelp.addHelpForComponent(iPeptideSequenceTextField, (String) PropertiesConfigurationHolder.getInstance().getProperty("form_help.peptide_sequence"));

        iSpeciesSelect.addListener(new Property.ValueChangeListener() {
            public void valueChange(Property.ValueChangeEvent aValueChangeEvent) {
                Organism lOrganism = (Organism) iSpeciesSelect.getValue();
                if (lOrganism == null) {
                    iVisible = Boolean.FALSE;
                    setFormComponentsVisible(iVisible);
                    return;
                } else {
                    if (!iVisible) {
                        iVisible = Boolean.TRUE;
                        setFormComponentsVisible(iVisible);
                    }
                    if (iApplication.getSigPepSession() == null || !iApplication.getSigPepSession().getOrganism().getScientificName().equals(lOrganism.getScientificName())) {
                        logger.info("Creating sigpep session and query service for organism " + lOrganism.getScientificName());
                        iApplication.setSigPepSession(iApplication.getSigPepSessionFactory().createSigPepSession(lOrganism));
                        iApplication.setSigPepQueryService(iApplication.getSigPepSession().createSigPepQueryService());
                    }
                    fillProteaseSelect();
                }
            }
        });

        setFormComponentsVisible(iVisible);
    }

    public Field createField(Item aItem, Object o, Component aComponent) {
        String pid = (String) o;
        if ("species".equals(pid)) {
            return iSpeciesSelect;
        } else if ("proteaseName".equals(pid)) {
            return iProteaseSelect;
        } else if ("peptideSequence".equals(pid)) {
            return iPeptideSequenceTextField;
        }
        return null;
    }

    private void setFormComponentsVisible(boolean setVisible) {
        iProteaseSelect.setVisible(setVisible);
        iPeptideSequenceTextField.setVisible(setVisible);
    }

    private void fillProteaseSelect() {
        if (iProteaseSelect.size() != 0) {
iProteaseSelect.removeAllItems(); } for (String lProteaseName : iApplication.getSigPepSession().getSimpleQueryDao().getUsedProteaseNames()) { iProteaseSelect.addItem(lProteaseName); } } private Set<Organism> getOrganisms() { return iApplication.getSigPepSessionFactory().getOrganisms(); } }
/** * Converts {@link ParaObject}s to H2 rows and inserts them. * @param <P> type of object * @param appid app id * @param objects list of ParaObjects */ protected static <P extends ParaObject> void createRows(String appid, List<P> objects) { if (StringUtils.isBlank(appid) || objects == null || objects.isEmpty()) { return; } Connection conn = null; PreparedStatement p = null; try { conn = getConnection(); String table = getTableNameForAppid(appid); p = conn.prepareStatement("MERGE INTO " + table + " VALUES (?,?,?,?,?,?,?,?)"); for (P object : objects) { if (StringUtils.isBlank(object.getId())) { object.setId(Utils.getNewId()); } if (object.getTimestamp() == null) { object.setTimestamp(Utils.timestamp()); } object.setAppid(appid); p.setString(1, object.getId()); p.setString(2, object.getType()); p.setString(3, object.getName()); p.setString(4, object.getParentid()); p.setString(5, object.getCreatorid()); p.setTimestamp(6, new Timestamp(object.getTimestamp())); if (object.getUpdated() == null) { p.setNull(7, Types.TIMESTAMP); } else { p.setTimestamp(7, new Timestamp(object.getUpdated())); } p.setString(8, ParaObjectUtils.getJsonWriterNoIdent(). writeValueAsString(ParaObjectUtils.getAnnotatedFields(object, false))); p.addBatch(); } p.executeBatch(); } catch (Exception e) { logger.error(null, e); } finally { closeConnection(conn); closeStatement(p); } }
/**
 * @description: User access token response (access_token, expires_in, refresh_token, openid, scope)
 * @author: hlx 2018-09-19
 **/
@Data
public class UserAccessToken implements Serializable {

    private static final long serialVersionUID = 1L;

    private String access_token;
    private Integer expires_in;
    private String refresh_token;
    private String openid;
    private String scope;

    public UserAccessToken() {
    }
}
// Props shared by the form field components.
export interface CommonFieldsProps {
  readonly id?: string
  readonly name: string
  readonly className?: Classcat.Class
  readonly value: string
  readonly disabled?: boolean
  readonly autoFocus?: boolean
  readonly required?: boolean
}

// Accessibility (ARIA) attributes that can be forwarded to an element.
export interface A11yProps {
  id?: string
  role?: string
  'aria-label'?: string
  'aria-labelledby'?: string
  'aria-describedby'?: string
  'aria-controls'?: string
}

type A11yKeys = keyof A11yProps

// Extracts only the accessibility-related props from `source`.
// Note: keys that are absent from `source` are still copied, as explicit `undefined`.
export function pickA11yProps<T extends A11yProps>(source: T): Pick<T, A11yKeys> {
  const returnValue = {} as Pick<T, A11yKeys>
  const keys: A11yKeys[] = [
    'id',
    'role',
    'aria-label',
    'aria-labelledby',
    'aria-describedby',
    'aria-controls',
  ]
  keys.forEach(k => {
    returnValue[k] = source[k]
  })
  return returnValue
}