text
stringlengths 1
1.05M
|
|---|
<filename>mongoose.js<gh_stars>0
// Open a Mongoose connection to the local "autos" MongoDB database.
const mongoose = require('mongoose');

mongoose.connect("mongodb://localhost:27017/autos", { useNewUrlParser: true });

const connection = mongoose.connection;
// Report success once, and log any connection-level error.
connection.once("open", () => console.log('Database verbonden! (mongoose.js, regel 6)'));
connection.on("error", (error) => {
  console.log("Oeps! Er is wat misgegaan", error);
});
|
# Load-test the WSGI server with wrk: 100 threads, 500 connections, 20 s run;
# write the report to wsgi-100-500.log.
wrk -t100 -c500 -d20s http://localhost:8080 > wsgi-100-500.log
|
<reponame>addyvm22/vantageProject1
/**
 * Plain data model for a single event listing.
 * Every field — including number_of_seats and both dates — is deliberately
 * kept as a string, mirroring the raw form/API payload it comes from.
 */
export class Event {
  public name: string;
  public location: string;
  public city: string;
  public country: string;
  public number_of_seats: string;
  public start_date: string;
  public end_date: string;

  constructor(
    name: string,
    location: string,
    city: string,
    country: string,
    number_of_seats: string,
    start_date: string,
    end_date: string
  ) {
    this.name = name;
    this.location = location;
    this.city = city;
    this.country = country;
    this.number_of_seats = number_of_seats;
    this.start_date = start_date;
    this.end_date = end_date;
  }
}
|
<gh_stars>0
import { Injectable } from '@angular/core';
import { ToastrService, ToastrConfig, ProgressAnimationType } from 'ngx-toastr';
/**
 * Thin wrapper around ngx-toastr that centralises the toast configuration
 * used across the app.
 */
@Injectable()
export class ToastService {
  // NOTE(review): declared but never referenced below — confirm whether it
  // was meant to be passed in the toast options.
  private animationType: ProgressAnimationType = 'increasing';

  // Success toasts auto-dismiss after 2.5 s.
  private successToastOptions = {
    positionClass: 'toast-top-center',
    timeOut: 2500
  };

  // Error toasts stay on screen until the user closes them.
  private errorToastOptions = {
    positionClass: 'toast-top-center',
    closeButton: true,
  };

  constructor(private toastService: ToastrService) {}

  /** Show a success toast with the shared success options. */
  public success(message: string, title?: string): void {
    const options = this.successToastOptions;
    this.toastService.success(message, title, options);
  }

  /** Show an error toast that must be dismissed manually. */
  public error(message: string, title?: string): void {
    const options = this.errorToastOptions;
    this.toastService.error(message, title, options);
  }
}
|
#!/usr/bin/env bash
# Bring up the TiDB, Neo4j and Elasticsearch compose stacks, detached.
for service in tidb neo4j elasticsearch; do
    docker-compose -f "docker/${service}.yml" up -d
done
|
<filename>pirates/piratesgui/GameOptionsMatrix.py<gh_stars>1-10
# File: G (Python 2.4)
# Pre-benchmarked graphics settings table, keyed by
# (GPU vendor id, GPU device id, "<os>.<major>.<minor>" OS/driver string).
# Each value is (encoded option flags, <float metric>, <float metric>, <int count>).
# NOTE(review): the meaning of the two float metrics (presumably benchmark
# scores) and of the r/s/t/c/e/m option flag encoding is not evident from this
# file — confirm against the tool that generated the table.
GameOptionsMatrix = {
    ('0x0000', '0x0000', 'nt.2.5.1'): ('r1s0s0t1c0e0c2t2m0', 10.80522, 3.21062, 230),
    ('0x0000', '0x0000', 'posix.-1.-1.-1'): ('r0s1s0t4c1e1c1t1m0', 7.7054900000000002, 3.68554, 164),
    ('0x1002', '0x4152', 'nt.2.5.1'): ('r1s1s0t1c1e2c2t2m0', 10.663959999999999, 4.1073399999999998, 308),
    ('0x1002', '0x4336', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m0', 7.4325599999999996, 2.8386800000000001, 215),
    ('0x1002', '0x4337', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 5.9333099999999996, 3.73204, 4290),
    ('0x1002', '0x4437', 'nt.2.5.1'): ('r1s0s0t2c1e1c1t1m0', 6.77433, 3.7682000000000002, 561),
    ('0x1002', '0x4c59', 'nt.2.5.1'): ('r1s0s0t2c1e1c1t1m0', 7.9733799999999997, 4.7099299999999999, 1431),
    ('0x1002', '0x4c66', 'nt.2.5.1'): ('r1s0s0t1c1e2c2t2m0', 12.222860000000001, 4.9211799999999997, 280),
    ('0x1002', '0x4e50', 'nt.2.5.1'): ('r0s0s0t4c1e0c1t1m0', 11.53013, 4.9451000000000001, 229),
    ('0x1002', '0x5159', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m1', 7.3187499999999996, 3.6355, 112),
    ('0x1002', '0x5246', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 5.6007499999999997, 4.0055500000000004, 928),
    ('0x1002', '0x5446', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m0', 8.2684200000000008, 2.8528199999999999, 190),
    ('0x1002', '0x5835', 'nt.2.5.1'): ('r0s0s0t2c0e1c0t0m0', 11.285069999999999, 2.9400300000000001, 201),
    ('0x1002', '0x5954', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m0', 13.47171, 4.7895599999999998, 152),
    ('0x1002', '0x5955', 'nt.2.5.1'): ('r0s0s0t2c1e0c0t0m0', 14.674849999999999, 4.7779199999999999, 163),
    ('0x1002', '0x5960', 'nt.2.5.1'): ('r2s1s0t1c1e2c1t1m0', 10.47184, 4.5880200000000002, 174),
    ('0x1002', '0x5974', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m0', 10.288679999999999, 3.4019499999999998, 106),
    ('0x1002', '0x5974', 'nt.2.6.0'): ('r1s0s0t1c1e0c0t0m1', 12.71058, 4.8982099999999997, 274),
    ('0x1002', '0x5975', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m0', 12.707610000000001, 4.8710699999999996, 775),
    ('0x1002', '0x5975', 'nt.2.6.0'): ('r0s0s0t1c0e0c2t0m0', 13.15283, 3.6918899999999999, 106),
    ('0x1002', '0x5a41', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 9.04786, 4.9805799999999998, 4043),
    ('0x1002', '0x5a42', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 8.3019700000000007, 4.7116300000000004, 2691),
    ('0x1002', '0x5a61', 'nt.2.5.1'): ('r0s0s1t1c0e2c2t1m1', 10.911379999999999, 3.70743, 123),
    ('0x1002', '0x5a62', 'nt.2.5.1'): ('r0s0s0t4c0e0c0t0m0', 9.7994500000000002, 4.1842800000000002, 182),
    ('0x1002', '0x5a62', 'nt.2.6.0'): ('r2s0s0t4c1e0c0t0m1', 8.8828200000000006, 4.85229, 262),
    ('0x1002', '0x5b62', 'nt.2.5.1'): ('r1s0s0t2c1e1c0t0m0', 15.660589999999999, 2.6379199999999998, 203),
    ('0x1002', '0x7146', 'nt.2.5.1'): ('r2s0s1t2c1e1c1t1m0', 15.14138, 4.0740800000000004, 116),
    ('0x1002', '0x71c2', 'nt.2.5.1'): ('r1s0s0t2c1e0c1t1m0', 15.53594, 4.9111099999999999, 128),
    ('0x1002', '0x7834', 'nt.2.5.1'): ('r1s0s0t2c1e1c1t1m0', 7.4180700000000002, 4.03538, 166),
    ('0x1002', '0x7835', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 8.8527799999999992, 4.8418999999999999, 540),
    ('0x1002', '0x791e', 'nt.2.5.1'): ('r0s0s0t4c1e0c1t1m0', 11.918229999999999, 3.7416700000000001, 203),
    ('0x1002', '0x791f', 'nt.2.6.0'): ('r1s0s0t4c1e0c2t2m0', 15.27251, 4.6304699999999999, 171),
    ('0x1039', '0x6325', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 6.7796599999999998, 4.6264500000000002, 11470),
    ('0x1039', '0x6330', 'nt.2.5.0'): ('r2s0s0t4c1e0c0t0m1', 11.410679999999999, 4.2844199999999999, 103),
    ('0x1039', '0x6330', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m0', 9.4169699999999992, 3.05681, 383),
    ('0x10de', '0x0028', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 6.8044700000000002, 4.8079000000000001, 1699),
    ('0x10de', '0x002c', 'nt.2.5.1'): ('r1s0s0t2c1e1c1t1m0', 6.0631700000000004, 2.7143899999999999, 972),
    ('0x10de', '0x002d', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m1', 6.7932199999999998, 3.8424700000000001, 649),
    ('0x10de', '0x0110', 'nt.2.5.0'): ('r0s0s0t4c0e0c0t0m0', 11.380330000000001, 4.0506099999999998, 122),
    ('0x10de', '0x0110', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m0', 11.185230000000001, 4.8938899999999999, 386),
    ('0x10de', '0x0162', 'nt.2.5.1'): ('r2s1s1t1c1e2c2t2m0', 11.50915, 4.1356099999999998, 164),
    ('0x10de', '0x0171', 'nt.2.5.1'): ('r1s0s0t4c1e1c1t1m0', 14.848839999999999, 3.76349, 129),
    ('0x10de', '0x0172', 'nt.2.5.1'): ('r1s0s0t4c0e0c0t0m0', 17.418990000000001, 4.6188000000000002, 179),
    ('0x10de', '0x017a', 'nt.2.5.1'): ('r0s0s0t2c1e1c0t0m0', 12.446709999999999, 4.1500899999999996, 152),
    ('0x10de', '0x0181', 'nt.2.5.1'): ('r1s0s0t2c1e1c2t2m0', 13.77392, 4.4326699999999999, 487),
    ('0x10de', '0x0185', 'nt.2.5.0'): ('r1s0s0t2c1e1c1t1m0', 8.7873199999999994, 4.7097600000000002, 347),
    ('0x10de', '0x0185', 'nt.2.5.1'): ('r1s0s0t2c1e1c1t2m0', 13.09796, 2.4777, 147),
    ('0x10de', '0x01d1', 'nt.2.5.1'): ('r2s1s0t2c1e1c1t1m0', 15.572139999999999, 4.4029999999999996, 140),
    ('0x10de', '0x01d7', 'nt.2.5.1'): ('r2s1s1t2c1e2c2t1m0', 13.831720000000001, 3.3864200000000002, 145),
    ('0x10de', '0x01d8', 'nt.2.5.1'): ('r2s0s1t1c1e2c2t2m0', 13.14921, 4.0702600000000002, 315),
    ('0x10de', '0x01d8', 'nt.2.6.0'): ('r0s0s0t2c1e0c0t0m1', 9.1299100000000006, 4.0579200000000002, 117),
    ('0x10de', '0x01df', 'nt.2.5.1'): ('r2s0s0t4c0e2c2t2m0', 16.472169999999998, 4.4119099999999998, 115),
    ('0x10de', '0x01df', 'nt.2.6.0'): ('r2s1s0t1c1e2c2t2m0', 15.19056, 4.49254, 233),
    ('0x10de', '0x01f0', 'nt.2.5.1'): ('r0s0s0t2c1e1c2t1m1', 12.048, 3.6370800000000001, 150),
    ('0x10de', '0x0221', 'nt.2.5.1'): ('r1s0s0t2c0e1c1t1m0', 16.601870000000002, 3.88924, 214),
    ('0x10de', '0x0221', 'nt.2.6.0'): ('r2s1s1t2c1e1c1t1m0', 13.011229999999999, 3.1724999999999999, 187),
    ('0x10de', '0x0222', 'nt.2.5.1'): ('r0s1s1t2c1e1c2t2m0', 9.7887900000000005, 3.3190900000000001, 116),
    ('0x10de', '0x0241', 'nt.2.5.1'): ('r0s0s0t2c0e1c1t1m0', 16.33671, 4.1279599999999999, 158),
    ('0x10de', '0x0241', 'nt.2.6.0'): ('r1s1s1t4c1e2c0t1m0', 17.292159999999999, 4.9147600000000002, 102),
    ('0x10de', '0x0242', 'nt.2.5.0'): ('r1s0s0t4c1e0c0t0m1', 16.968029999999999, 4.0619399999999999, 147),
    ('0x10de', '0x0242', 'nt.2.5.1'): ('r0s0s0t2c1e1c1t0m0', 18.699539999999999, 4.6235099999999996, 218),
    ('0x10de', '0x0244', 'nt.2.5.1'): ('r2s0s0t4c1e0c0t0m0', 15.46697, 3.7277999999999998, 109),
    ('0x10de', '0x0244', 'nt.2.6.0'): ('r0s0s0t2c1e0c1t1m0', 19.13289, 4.1470599999999997, 228),
    ('0x10de', '0x0247', 'nt.2.6.0'): ('r2s0s1t2c1e1c1t1m0', 12.14507, 4.3270900000000001, 142),
    ('0x10de', '0x0250', 'nt.2.5.1'): ('r1s0s0t2c1e1c1t2m0', 9.6937499999999996, 4.2436299999999996, 144),
    ('0x10de', '0x0312', 'nt.2.5.1'): ('r2s0s0t1c1e2c2t2m0', 9.0507299999999997, 3.2231000000000001, 205),
    ('0x10de', '0x0322', 'nt.2.5.1'): ('r1s0s0t1c1e0c0t0m0', 23.477139999999999, 4.5664699999999998, 140),
    ('0x10de', '0x0326', 'nt.2.5.1'): ('r1s0s1t2c1e1c2t1m0', 16.453289999999999, 4.8002700000000003, 167),
    ('0x10de', '0x0326', 'nt.2.6.0'): ('r1s0s0t4c1e0c0t0m0', 15.94444, 3.0167000000000002, 153),
    ('0x10de', '0x0343', 'nt.2.5.1'): ('r1s1s1t1c1e2c1t1m0', 8.6313800000000001, 3.0136500000000002, 290),
    ('0x10de', '0x03d0', 'nt.2.6.0'): ('r0s0s0t2c0e1c1t1m0', 19.370729999999998, 4.7670399999999997, 123),
    ('0x10de', '0x03d1', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m0', 19.002330000000001, 4.3864999999999998, 215),
    ('0x10de', '0x0531', 'nt.2.6.0'): ('r0s0s0t2c1e1c1t1m0', 16.355840000000001, 3.7639100000000001, 317),
    ('0x1106', '0x3230', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 10.43383, 4.3739499999999998, 5614),
    ('0x1106', '0x3371', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 8.82714, 4.7605300000000002, 2502),
    ('0x1106', '0x3371', 'nt.2.6.0'): ('r0s0s0t4c1e0c0t0m1', 5.2466699999999999, 3.4482400000000002, 135),
    ('0x5333', '0x8a26', 'nt.2.5.1'): ('r1s0s0t4c1e0c0t0m1', 4.4750399999999999, 3.0866600000000002, 1863),
    ('0x5333', '0x8d04', 'nt.2.5.1'): ('r0s0s0t4c1e0c0t0m1', 4.9931200000000002, 2.7521, 189),
    ('0x8086', '0x2562', 'nt.2.5.1'): ('r1s0s0t4c0e2c0t0m1', 15.75259, 4.46638, 116),
    ('0x8086', '0x2572', 'nt.2.5.1'): ('r0s0s0t4c1e0c1t0m1', 13.22174, 4.86944, 115),
    ('0x8086', '0x2582', 'nt.2.5.1'): ('r0s0s0t2c1e1c2t2m0', 14.17008, 4.5103299999999997, 127),
    ('0x8086', '0x2582', 'nt.2.6.0'): ('r1s0s0t1c1e2c2t2m0', 12.405709999999999, 4.82599, 175),
    ('0x8086', '0x2592', 'nt.2.5.1'): ('r0s0s0t4c1e1c0t0m0', 14.199999999999999, 3.7468300000000001, 127),
    ('0x8086', '0x2772', 'nt.2.5.1'): ('r0s0s0t2c1e0c0t0m1', 12.67061, 4.9232300000000002, 245),
    ('0x8086', '0x2772', 'nt.2.6.0'): ('r1s0s0t2c0e0c0t0m1', 17.452760000000001, 4.5514400000000004, 163),
    ('0x8086', '0x27a2', 'nt.2.5.1'): ('r1s0s0t2c1e1c1t1m0', 14.8704, 4.8711000000000002, 375),
    ('0x8086', '0x27a2', 'nt.2.6.0'): ('r0s0s0t2c1e0c0t0m1', 12.0685, 4.6697699999999998, 127),
    ('0x8086', '0x2972', 'nt.2.5.1'): ('r1s0s0t2c1e1c2t2m0', 11.275, 3.70309, 160),
    ('0x8086', '0x2972', 'nt.2.6.0'): ('r1s0s0t4c1e0c0t0m1', 8.7266600000000007, 4.2717200000000002, 5653),
    ('0x8086', '0x29a2', 'nt.2.5.1'): ('r2s0s1t1c1e2c2t2m1', 9.0861099999999997, 1.6142000000000001, 108),
    ('0x8086', '0x29a2', 'nt.2.6.0'): ('r0s0s0t4c1e0c0t0m1', 10.55226, 4.7256200000000002, 509),
    ('0x8086', '0x29c2', 'nt.2.5.1'): ('r2s1s1t1c1e2c2t2m0', 12.98696, 4.0732100000000004, 276),
    ('0x8086', '0x29c2', 'nt.2.6.0'): ('r1s0s0t1c1e2c1t1m1', 14.58839, 4.9627999999999997, 112),
    ('0x8086', '0x2a02', 'nt.2.6.0'): ('r1s0s0t4c1e0c1t1m0', 19.085439999999998, 3.2551899999999998, 577),
    ('0x8086', '0x3582', 'nt.2.5.1'): ('r0s0s0t2c1e0c0t0m0', 9.9886999999999997, 2.6893099999999999, 177) }
|
#! /usr/bin/env bash
# Provision Python 2 and 3 tooling (pip, dev headers, virtualenv) on a
# Debian-based system; abort on the first failing command.
set -e
apt-get update -qq
apt-get upgrade -yqq
# The {,3} brace expansion installs both the python- and python3- variants.
apt-get install python{,3}-pip python{,3}-dev build-essential python{,3}-virtualenv -yqq
# Inside a Docker build (marker file /.dockerenv), trim caches, locales and
# docs to keep the image small.
if [ -f /.dockerenv ]; then
apt-get -yqq autoremove
apt-get -yqq clean
rm -rf /var/lib/apt/lists/* /var/cache/* /tmp/* /usr/share/locale/* /usr/share/man /usr/share/doc /lib/xtables/libip6*
fi
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.iosClose = void 0;
var iosClose = {
"viewBox": "0 0 512 512",
"children": [{
"name": "style",
"attribs": {
"type": "text/css"
},
"children": []
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"class": "st0",
"d": "M403.1,108.9c-81.2-81.2-212.9-81.2-294.2,0s-81.2,212.9,0,294.2c81.2,81.2,212.9,81.2,294.2,0\r\n\t\tS484.3,190.1,403.1,108.9z M352,340.2L340.2,352l-84.4-84.2l-84,83.8L160,339.8l84-83.8l-84-83.8l11.8-11.8l84,83.8l84.4-84.2\r\n\t\tl11.8,11.8L267.6,256L352,340.2z"
},
"children": [{
"name": "path",
"attribs": {
"class": "st0",
"d": "M403.1,108.9c-81.2-81.2-212.9-81.2-294.2,0s-81.2,212.9,0,294.2c81.2,81.2,212.9,81.2,294.2,0\r\n\t\tS484.3,190.1,403.1,108.9z M352,340.2L340.2,352l-84.4-84.2l-84,83.8L160,339.8l84-83.8l-84-83.8l11.8-11.8l84,83.8l84.4-84.2\r\n\t\tl11.8,11.8L267.6,256L352,340.2z"
},
"children": []
}]
}]
}]
};
exports.iosClose = iosClose;
|
<filename>cblibrary/src/main/java/cbedoy/cblibrary/services/BlurService.java<gh_stars>1-10
/**
* Created by <NAME> on 15/05/14.
* exchange-android - Pademobile
*/
package cbedoy.cblibrary.services;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Build;
import android.support.v8.renderscript.Allocation;
import android.support.v8.renderscript.Element;
import android.support.v8.renderscript.RenderScript;
import android.support.v8.renderscript.ScriptIntrinsicBlur;
import android.view.View;
import java.util.concurrent.ExecutionException;
import cbedoy.cblibrary.interfaces.IAppViewManager;
/**
 * Singleton service producing blurred bitmaps and backgrounds via the
 * support-v8 RenderScript intrinsic Gaussian blur. The last blurred
 * screenshot and background drawable are cached in static fields and
 * recycled before being replaced.
 *
 * NOTE(review): getInstance() does unsynchronized lazy initialization
 * (not thread-safe), and supportedBlur() depends on
 * ApplicationLoader.mainContext, defined elsewhere in this project.
 */
public class BlurService {
public static BlurService instance;
private static Bitmap mBluredScreenShot;
private static Bitmap mBluredService;
private static Drawable mDrawableBackground;
// Lazy singleton accessor (no locking — see class note).
public static BlurService getInstance(){
if(instance == null){
instance = new BlurService();
}
return instance;
}
// Blur with the maximum intrinsic radius (25).
// Returns null on API levels below 16, where no blur is attempted.
private Bitmap blurRenderScript(Bitmap smallBitmap)
{
Bitmap bitmap = null;
if (Build.VERSION.SDK_INT >= 16)
{
bitmap = supportedBlur(smallBitmap, 25);
}
return bitmap;
}
// Same as above, but with a caller-supplied radius.
private Bitmap blurRenderScript(Bitmap smallBitmap, int radius)
{
Bitmap bitmap = null;
if (Build.VERSION.SDK_INT >= 16)
{
bitmap = supportedBlur(smallBitmap, radius);
}
return bitmap;
}
// Run the blur on a worker thread but block the caller until the result
// is ready; returns the input bitmap unchanged if the task is
// interrupted or fails.
public Bitmap performRequestBlurByImage(Bitmap bitmap)
{
DoAsyncBlur doAsyncBlur = new DoAsyncBlur();
// On Honeycomb+ AsyncTask defaults to a serial executor; use the
// thread pool so concurrent blur requests do not queue behind each other.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
doAsyncBlur.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, bitmap);
} else {
doAsyncBlur.execute(bitmap);
}
try
{
return doAsyncBlur.get();
} catch (InterruptedException e) {
return bitmap;
} catch (ExecutionException e) {
return bitmap;
}
}
// As performRequestBlurByImage, with an explicit blur radius.
public Bitmap performRequestBlurByImageWithRadius(Bitmap bitmap, int radius)
{
DoAsyncBlurWithRadius doAsyncBlur = new DoAsyncBlurWithRadius();
doAsyncBlur.setRadius(radius);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
doAsyncBlur.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, bitmap);
} else {
doAsyncBlur.execute(bitmap);
}
try {
return doAsyncBlur.get();
} catch (InterruptedException e) {
return bitmap;
} catch (ExecutionException e) {
return bitmap;
}
}
// Worker task: fixed-radius blur of the first bitmap argument.
private class DoAsyncBlur extends AsyncTask<Bitmap, Void, Bitmap> {
@Override
protected Bitmap doInBackground(Bitmap... bitmaps) {
return blurRenderScript(bitmaps[0]);
}
}
// Worker task: blur with a radius set via setRadius() before execution.
private class DoAsyncBlurWithRadius extends AsyncTask<Bitmap, Void, Bitmap> {
private int radius;
public void setRadius(int radius) {
this.radius = radius;
}
@Override
protected Bitmap doInBackground(Bitmap... bitmaps) {
return blurRenderScript(bitmaps[0], radius);
}
}
// Core blur: runs ScriptIntrinsicBlur over the input and returns a new
// bitmap of the same size/config. Recycles the input bitmap — callers
// must not touch it afterwards. Radius is clamped to the intrinsic's
// maximum of 25.
private Bitmap supportedBlur(Bitmap sentBitmap, int radius)
{
Bitmap output = Bitmap.createBitmap(sentBitmap.getWidth(), sentBitmap.getHeight(), sentBitmap.getConfig());
RenderScript rs = RenderScript.create(ApplicationLoader.mainContext);
ScriptIntrinsicBlur script = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
Allocation inAlloc = Allocation.createFromBitmap(rs, sentBitmap, Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_GRAPHICS_TEXTURE);
Allocation outAlloc = Allocation.createFromBitmap(rs, output);
script.setRadius(radius > 25 ? 25 : radius);
script.setInput(inAlloc);
script.forEach(outAlloc);
outAlloc.copyTo(output);
rs.destroy();
sentBitmap.recycle();
System.gc();
return output;
}
// Screenshot the activity (minus status bar), blur it, and return the
// result as the cached background drawable. Previously cached bitmaps
// are recycled first.
public Drawable doBlurWithActivity(Activity activity)
{
if(mBluredScreenShot != null){
mBluredScreenShot.recycle();
mBluredScreenShot = null;
}
if (mDrawableBackground instanceof BitmapDrawable)
{
BitmapDrawable bitmapDrawable = (BitmapDrawable) mDrawableBackground;
Bitmap bitmap = bitmapDrawable.getBitmap();
bitmap.recycle();
}
takeCurrentScreamShot(activity);
performBluring();
return mDrawableBackground;
}
// Capture the activity's decor view into mBluredScreenShot, cropping off
// the status bar. Failures are swallowed, leaving mBluredScreenShot null.
private void takeCurrentScreamShot(Activity activity)
{
try
{
View view = activity.getWindow().getDecorView();
view.setDrawingCacheEnabled(true);
view.buildDrawingCache();
Bitmap drawingCache = view.getDrawingCache();
Rect rect = new Rect();
activity.getWindow().getDecorView().getWindowVisibleDisplayFrame(rect);
int statusBarHeight = rect.top;
int width = activity.getWindowManager().getDefaultDisplay().getWidth();
int height = activity.getWindowManager().getDefaultDisplay().getHeight();
mBluredScreenShot = Bitmap.createBitmap(drawingCache, 0, statusBarHeight, width, height - statusBarHeight);
view.destroyDrawingCache();
drawingCache.recycle();
System.gc();
}
catch (Exception e)
{
System.gc();
}
}
// Blur the captured screenshot (radius 10) into mDrawableBackground;
// fall back to a transparent drawable when no screenshot exists or the
// blur failed.
private void performBluring()
{
if(mBluredScreenShot != null)
{
Bitmap blurByImageWithRadius = BlurService.getInstance().performRequestBlurByImageWithRadius(mBluredScreenShot, 10);
if(blurByImageWithRadius != null)
{
mDrawableBackground = new BitmapDrawable(blurByImageWithRadius);
}
else
{
mDrawableBackground = new ColorDrawable(Color.TRANSPARENT);
}
}
else
{
mDrawableBackground = new ColorDrawable(Color.TRANSPARENT);
}
}
// Double-blur a file-loaded bitmap and install it as the view manager's
// background; on failure a translucent dark overlay is used instead.
// Both the source bitmap and the intermediate blur are recycled.
public void doBlurWithBitmap(Bitmap bitmapFromFile, IAppViewManager viewManager)
{
if(mBluredService != null)
{
mBluredService.recycle();
mBluredService = null;
}
Bitmap firstBlurImage = performRequestBlurByImage(bitmapFromFile);
if(firstBlurImage != null)
{
mBluredService = performRequestBlurByImage(firstBlurImage);
viewManager.setBackgroundViewController(mBluredService);
}
else
{
ColorDrawable colorDrawable = new ColorDrawable(Color.parseColor("#CC2F2B29"));
viewManager.setBackgroundViewController(bitmapFromFile, colorDrawable);
}
if(bitmapFromFile != null)
{
bitmapFromFile.recycle();
}
if(firstBlurImage != null)
{
firstBlurImage.recycle();
}
}
}
|
# Evaluate the fine-tuned model on the WikiText-103 validation set with the
# shuffle-trigrams-within-sentences augmentation, one sample per batch.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-old/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-old/7-1024+0+512-STWS-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_trigrams_within_sentences_first_two_thirds_full --eval_function last_element_eval
|
use std::cmp::PartialEq;
use std::marker::PhantomData;

/// Placeholder rectangle type (no geometry is needed for this example).
struct Rect {
    // Define the Rect struct properties
}

/// Link from a component back to its runtime.
/// The `PhantomData` marker ties the otherwise-unused type parameter `T`
/// to the struct; without it the definition is rejected by the compiler
/// (error E0392: parameter `T` is never used).
struct ComponentLink<T> {
    _marker: PhantomData<T>,
}

/// Whether the framework should re-render a component after a change.
enum ShouldRender {
    Yes,
    No,
}

trait Component {
    type Properties;
    /// Build a component from its properties, layout frame and runtime link.
    /// `Self: Sized` is required so `Self` can be used as the generic
    /// argument of `ComponentLink` and returned by value.
    fn create(properties: Self::Properties, frame: Rect, _link: ComponentLink<Self>) -> Self
    where
        Self: Sized;
    /// Accept new properties; report whether a re-render is needed.
    fn change(&mut self, properties: Self::Properties) -> ShouldRender;
}

/// Example component whose only state is a string of properties.
struct CustomComponent {
    properties: String, // Replace with the actual type of properties
    frame: Rect,        // Replace with the actual type of frame
}

impl Component for CustomComponent {
    type Properties = String;

    fn create(properties: Self::Properties, frame: Rect, _link: ComponentLink<Self>) -> Self {
        Self { properties, frame }
    }

    fn change(&mut self, properties: Self::Properties) -> ShouldRender {
        // Only re-render when the properties actually differ
        // (uses String's PartialEq implementation).
        if self.properties != properties {
            self.properties = properties;
            ShouldRender::Yes
        } else {
            ShouldRender::No
        }
    }
}

fn main() {
    let frame = Rect {};
    // The concrete `T` (CustomComponent) is inferred from the call to
    // `create` below; only the marker field needs initializing.
    let link = ComponentLink { _marker: PhantomData };
    let initial_properties = "Initial properties".to_string();
    let mut custom_component = CustomComponent::create(initial_properties, frame, link);

    let new_properties = "New properties".to_string();
    let should_render = custom_component.change(new_properties);

    match should_render {
        ShouldRender::Yes => {
            // Re-render the component
            println!("Component should be re-rendered");
        }
        ShouldRender::No => {
            // Do not re-render the component
            println!("Component should not be re-rendered");
        }
    }
}
|
<gh_stars>0
package com.iafinn;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Main servlet for the web app
* Processes movie ratings and filter requests
* and calls the recommendation model
* then forwards results to results.jsp
*/
@WebServlet("/MainServlet")
public class MainServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
processRequest(req, res);
}
private void processRequest(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
MovieDatabase.initialize("com/iafinn/movies_clean.csv");
ArrayList<String> titles = MovieDatabase.getMovieTitles();
Rater me = new EfficientRater("me");
for (String title : titles) {
Object rated = req.getParameter(title);
if (rated!=null) {
me.addRating(MovieDatabase.getIdFromTitle(title), Double.parseDouble((String) rated));
}
}
String year1 = (String) req.getParameter("startyear");
String year2 = (String) req.getParameter("endyear");
int startYear = 1900;
int endYear = 2020;
if (!year1.equals("Start year")) {
startYear = Integer.parseInt(year1);
}
if (!year2.equals("End year")) {
endYear = Integer.parseInt(year2);
}
String genre = (String) req.getParameter("genre");
ModelRunner mr = new ModelRunner();
List<Rating> recs = mr.printSimilarRatingsByYearAndGenre(me, startYear, endYear, genre);
String movie = "";
for (int k=0; k<recs.size() && k<=10; k++) {
String id = recs.get(k).getItem();
movie += id + ";;" + MovieDatabase.getTitle(id) + ";;" + MovieDatabase.getGenres(id) + "::";
}
req.setAttribute("recMovies", movie);
RequestDispatcher rd = req.getRequestDispatcher("results.jsp");
rd.forward(req, res);
}
}
|
<reponame>neonkitchen/disent
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
# MIT License
#
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
from typing import Sequence
import numpy as np
from torch.utils.data import Dataset
from disent.dataset._augment_util import AugmentableDataset
from disent.util import LengthIter
# ========================================================================= #
# Randomly Paired Dataset #
# ========================================================================= #
class RandomDataset(Dataset, LengthIter, AugmentableDataset):
    """Dataset wrapper that returns the requested observation together with
    randomly chosen extra observations from the same underlying data.

    The first returned index is always the requested one; the remaining
    ``num_samples - 1`` indices are drawn uniformly at random.
    """

    def __init__(
        self,
        data: Sequence,
        transform=None,
        augment=None,
        num_samples=1,
    ):
        self._data = data
        self._num_samples = num_samples  # observations returned per __getitem__
        # hooks consumed by the AugmentableDataset machinery
        self._transform = transform
        self._augment = augment
        super().__init__()

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
    # Augmentable Dataset Overrides                                         #
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #

    @property
    def transform(self):
        return self._transform

    @property
    def augment(self):
        return self._augment

    def _get_augmentable_observation(self, idx):
        return self._data[idx]

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
    # Sampling                                                              #
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #

    def __len__(self):
        return len(self._data)

    def __getitem__(self, idx):
        # requested index first, then num_samples-1 uniformly random extras
        extra_indices = np.random.randint(0, len(self), size=self._num_samples - 1)
        return self.dataset_get_observation(idx, *extra_indices)
# ========================================================================= #
# End #
# ========================================================================= #
|
/**
* Copyright © 2015, University of Washington
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* * Neither the name of the University of Washington nor the names
* of its contributors may be used to endorse or promote products
* derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF
* WASHINGTON BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.uw.apl.tupelo.store.tools;
import org.apache.commons.cli.*;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import edu.uw.apl.tupelo.store.filesys.FilesystemStore;
/**
* A container for state common to all command line-driven tools in
* this module
*/
class Base {
protected Base() {
storeLocation = STORELOCATIONDEFAULT;
log = Logger.getLogger( getClass() );
}
// Parse the command line; exits the process with status 1 on a parse
// error or missing positionals. With -a (process all disks) the
// "diskID sessionID" positional pair is not required.
protected void readArgs( String[] args ) {
Options os = commonOptions();
String usage = commonUsage() + " (diskID sessionID)?";
final String HEADER = "";
final String FOOTER = "";
CommandLineParser clp = new DefaultParser();
CommandLine cl = null;
try {
cl = clp.parse( os, args );
} catch( ParseException pe ) {
printUsage( os, usage, HEADER, FOOTER );
System.exit(1);
}
commonParse( os, cl, usage, HEADER, FOOTER );
if( all )
return;
// Remaining non-option arguments must name a disk and a session.
args = cl.getArgs();
if( args.length < 2 ) {
printUsage( os, usage, HEADER, FOOTER );
System.exit(1);
}
diskID = args[0];
sessionID = args[1];
}
// Print a formatted usage/help message (80-column output).
static protected void printUsage( Options os, String usage,
String header, String footer ) {
HelpFormatter hf = new HelpFormatter();
hf.setWidth( 80 );
hf.printHelp( usage, header, os, footer );
}
// Build the option set shared by every tool in this module.
protected Options commonOptions() {
Options os = new Options();
os.addOption( "a", false,
"Hash all managed disks (those done not re-computed)" );
os.addOption( "d", false, "Debug" );
os.addOption( "h", false, "Help" );
os.addOption( "s", true,
"Store directory. Defaults to " + STORELOCATIONDEFAULT );
os.addOption( "v", false, "Verbose" );
return os;
}
// One-line usage string matching the options built above.
protected String commonUsage() {
return getClass().getName() + " [-a] [-d] [-h] [-s storeLocation] [-v]";
}
// Apply the shared options to this object's fields; -h prints usage and
// exits, -d additionally raises the log level to DEBUG.
protected void commonParse( Options os, CommandLine cl, String usage,
String header, String footer ) {
boolean help = cl.hasOption( "h" );
if( help ) {
printUsage( os, usage, header, footer );
System.exit(1);
}
all = cl.hasOption( "a" );
debug = cl.hasOption( "d" );
verbose = cl.hasOption( "v" );
if( debug )
log.setLevel( Level.DEBUG );
if( cl.hasOption( "s" ) ) {
storeLocation = cl.getOptionValue( "s" );
}
}
// Shared state populated by the parsing methods above.
protected String storeLocation;
protected FilesystemStore store;
protected boolean all;
protected String diskID, sessionID;
protected Logger log;
static protected final String STORELOCATIONDEFAULT = "./test-store";
static protected boolean debug, verbose;
}
// eof
|
#!/bin/sh
# Export the mount points of the first and second USB disks for later scripts.
export usb=/mnt/usb/usbdisk
export usb1=/mnt/usb/usbdisk1
|
#!/usr/bin/env bash
# Travis CI helper: decide whether this build is a "version bump" build and,
# if so, deploy the release, tag it, and prepare the next snapshot version.
dir="$(dirname "$(realpath "$0")")"

# Print an error message ($1) and exit with the given status code ($2).
# Both expansions are quoted so messages are not word-split or glob-expanded.
function fail {
  echo "$1"
  exit "$2"
}

if [ $# -ne 0 ] ; then
  fail "No argument expected" 2
fi
if ! which xmlstarlet >/dev/null 2>&1 ;then
  fail "Need program xmlstarlet" 3
fi
# Version bumps must never run for pull-request builds.
if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
  fail "Cannot version bump on a PR" 3
fi

echo "Checking if need to do version bump."
version="$(xmlstarlet sel -t -v "/_:project/_:version" "pom.xml")"
msg="$(git log -1 --pretty=%B)"

# Three independent indicators must all agree before bumping:
branchC="true"    # branch is named versionBump/*
versionC="true"   # pom version is a release (not *Snapshot)
msgC="true"       # last commit message announces the bump
finished="false"  # a previous run already completed this bump
if [[ "$TRAVIS_BRANCH" != "versionBump/"* ]] ;then
  branchC="false"
fi
if [[ "$version" == *"Snapshot" ]] ;then
  versionC="false"
fi
if [[ "$msg" != "Bump version to "* ]] ;then
  msgC="false"
fi
if [[ "$msg" == "Prepare for development on"* ]];then
  finished="true"
fi
if [[ "$TRAVIS_BRANCH" == "$TRAVIS_TAG" ]] ;then
  finished="true"
fi

if [ "$branchC" == "true" ] && [ "$versionC" == "true" ] && [ "$msgC" == "true" ] ;then
  echo "Doing version bump."
elif [ "$branchC" == "false" ] && [ "$versionC" == "false" ] && [ "$msgC" == "false" ] ;then
  echo "Not doing version bump."
  exit 0
elif [ "$finished" == "true" ] ;then
  echo "Detected finished version bump. Not doing it again."
  exit 0
else
  # Mixed signals: fail loudly so a misconfigured branch cannot
  # half-release an artifact.
  echo "Version bump indicators don't match!"
  echo "Branch name: $TRAVIS_BRANCH"
  echo " -> $branchC"
  echo "Artifact version: $version"
  echo " -> $versionC"
  echo "Last commit msg: $msg"
  echo " -> $msgC"
  fail "Not doing version bump." 5
fi

# Increment the last numeric component and append -Snapshot,
# e.g. 1.2.3 -> 1.2.4-Snapshot (shell arithmetic instead of legacy expr).
versionFirstPart="${version%.*}"
versionLastPart="${version##*.}"
versionLastPartIncreased="$((versionLastPart + 1))"
newVersion="$versionFirstPart.$versionLastPartIncreased-Snapshot"

# Deploy the release, tag it, move the sources to the next snapshot
# version, and push both the branch and the new tag.
mvn -s "$dir/m2settings.xml" deploy -Drepo.login=$REPO_LOGIN -Drepo.pwd=$REPO_PWD
set -x
git tag "$version"
"$dir/versionBumpLocal.sh" "$newVersion"
git commit -a -m "Prepare for development on $newVersion"
source "$dir/prepareGitForPush.sh"
git push origin "HEAD:$TRAVIS_BRANCH"
git push origin "$version"
|
#!/usr/bin/env bash
###############################################################################
# Copyright 2018 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Start/stop wrapper for the control calibration cyber module.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "${DIR}/.."
source "${DIR}/apollo_base.sh"
# Launch calibration.launch in the background (log to data/log/), unless a
# process for calibration.dag is already running.
function start() {
LOG="${APOLLO_ROOT_DIR}/data/log/calibration.out"
CMD="cyber_launch start ${APOLLO_ROOT_DIR}/modules/control/launch/calibration.launch"
NUM_PROCESSES="$(pgrep -c -f "${APOLLO_ROOT_DIR}/modules/control/dag/calibration.dag")"
if [ "${NUM_PROCESSES}" -eq 0 ]; then
eval "nohup ${CMD} </dev/null >${LOG} 2>&1 &"
fi
}
# Force-kill any running calibration component.
function stop() {
pkill -SIGKILL -f calibration_component
}
# run command_name module_name
# Dispatch on the first CLI argument; any value other than "stop" starts.
function run() {
case $1 in
start)
start
;;
stop)
stop
;;
*)
start
;;
esac
}
run "$1"
|
#! /bin/bash
# Generated SLURM batch script: runs one REXI scalability benchmark
# configuration on the LRZ mpp2 cluster (1 MPI rank, 8 OpenMP threads,
# exclusive node, 8h wall time).
#SBATCH -o /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd/run_rexi_m000092_t008_n0064_r0001_a1.txt
###SBATCH -e /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd/run_rexi_m000092_t008_n0064_r0001_a1.err
#SBATCH -J rexi_m000092_t008_n0064_r0001_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=8
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=08:00:00
#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin one OpenMP thread per physical core (Intel runtime affinity).
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=8
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo
# Load the toolchain the prebuilt binary expects (Intel 16 + Intel MPI 5.1).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5
cd /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd
cd ../../../
. local_software/env_vars.sh
# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T8"
# NOTE(review): -C appears twice (0.3 and -5.0); the later occurrence usually
# wins with getopt-style parsers -- confirm the generator intends this.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 8 -envall -ppn 3 -n 1 ./build/rexi_m_tyes_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 64 -U 0 -S 0 --use-specdiff-for-complex-array 1 --rexi-h 0.2 --timestepping-mode 1 --staggering 0 --rexi-m=92 -C -5.0
|
#!/usr/bin/env bash
#
# This script is used for adding and removing DNS records for VirtualBox vm
# instances, launched by Terraform.
#
# Expected environment: DNS_ACTION ("add"/"remove"), IP, NODE_ALIAS,
# NODE_FQDN, DNS_FILE (normally /etc/hosts) and PASSWORD for sudo.
#
# Fix: the original shebang was /bin/sh, but the script uses the bash-only
# [[ ]] conditional, so it must run under bash; ${DNS_FILE} is now quoted.
if [[ "${DNS_ACTION}" == "add" ]] && [[ $(grep -c "${IP} ${NODE_ALIAS} ${NODE_FQDN}" /etc/hosts) == 0 ]]; then
  if (uname -a | grep -i 'darwin' >/dev/null); then # Host is macOS
    # BSD sed requires an explicit (empty) backup suffix after -i and a
    # literal newline inside the append ($a) command.
    echo "${PASSWORD}" | sudo -S -k sed -i "" '$a\
'"${IP} ${NODE_ALIAS} ${NODE_FQDN}"'
' "${DNS_FILE}" # add the DNS A record
  else # Host is Linux
    echo "${PASSWORD}" | sudo -S -k sed -i '$a'"${IP} ${NODE_ALIAS} ${NODE_FQDN}"'' "${DNS_FILE}" # add the DNS A record
  fi
elif [[ "${DNS_ACTION}" == "remove" ]] && [[ "${PASSWORD}" != "!" ]]; then
  if (uname -a | grep -i 'darwin' >/dev/null); then # Host is macOS
    echo "${PASSWORD}" | sudo -S -k sed -i "" "/${NODE_FQDN}/d" "${DNS_FILE}" # remove the DNS A record
  else # Host is Linux
    echo "${PASSWORD}" | sudo -S -k sed -i "/${NODE_FQDN}/d" "${DNS_FILE}" # remove the DNS A record
  fi
fi
|
<gh_stars>0
// Toy arithmetic demo: starting from 1, add 6*i for every i counting down
// from `value` to 1.  x accumulates repeated divisions (never read after the
// loop by this script); y and z are declared but unused.
let value = 8000;
let x = 1;
let y = 10;
let z = 5;
let result = 1;
let i = value;
while (i > 0) {
  x = x / i;
  result += 6 * i;
  i -= 1;
}
console.log(result);
|
var fs = require('fs');
function getRecall(req, res) {
res.writeHead(200, {
'Content-Type': 'text/html;charset=utf-8'
});
function recall(data) {
res.write(data);
res.end('');
}
return recall;
}
// File-serving helpers shared by the HTTP handlers.
module.exports = {
    // Read the file at `path` as binary and stream it straight into the HTTP
    // response `res`; on read failure the error is logged and the (Chinese)
    // "file does not exist" message is sent instead.
    readImg: function(path, res) {
        fs.readFile(path, 'binary', function(err, file) {
            if (err) {
                console.log(err);
                res.end('文件不存在');
            } else {
                console.log('输出文件');
                res.write(file, 'binary');
                res.end();
            }
        })
    },
    // Read the file at `path` and hand its contents to `recall`.  On error the
    // failure is only logged -- `recall` is never invoked, so callers get no
    // response for a missing file.
    readfile: function(path, recall) {
        fs.readFile(path, function(err, file) {
            if (err) {
                console.log(err);
                return;
            } else {
                recall(file);
            }
        });
    }
}
|
<reponame>smagill/opensphere-desktop<gh_stars>10-100
package io.opensphere.mantle.data.geom.style.impl;
import java.awt.Color;
import io.opensphere.core.geometry.renderproperties.BaseAltitudeRenderProperties;
import io.opensphere.core.geometry.renderproperties.DefaultBaseAltitudeRenderProperties;
import io.opensphere.core.geometry.renderproperties.DefaultPointRenderProperties;
import io.opensphere.core.geometry.renderproperties.DefaultPointSizeRenderProperty;
import io.opensphere.core.geometry.renderproperties.PointRenderProperties;
import io.opensphere.core.geometry.renderproperties.PointSizeRenderProperty;
import io.opensphere.core.order.impl.DefaultOrderCategory;
import io.opensphere.mantle.data.BasicVisualizationInfo;
import io.opensphere.mantle.data.MapVisualizationInfo;
import io.opensphere.mantle.data.geom.factory.RenderPropertyPool;
import io.opensphere.mantle.data.geom.style.FeatureIndividualGeometryBuilderData;
import io.opensphere.mantle.data.geom.style.PointRenderPropertyFactory;
/** Helper that builds pooled point render properties for features. */
public class PointRenderPropertiesHelper
{
    /** The render property pool used to deduplicate property instances. */
    private final RenderPropertyPool myRenderPropertyPool;

    /**
     * Constructor.
     *
     * @param renderPropertyPool The render property pool.
     */
    public PointRenderPropertiesHelper(RenderPropertyPool renderPropertyPool)
    {
        myRenderPropertyPool = renderPropertyPool;
    }

    /**
     * Builds (or retrieves from the pool) the combined point render properties
     * for one feature: first the size properties, then the base altitude
     * properties, then the combined {@link PointRenderProperties} -- each one
     * deduplicated through the pool.
     *
     * @param mapVisInfo the map vis info (may be null)
     * @param basicVisInfo the basic vis info
     * @param size the point size
     * @param bd the {@link FeatureIndividualGeometryBuilderData}
     * @param color the point color
     * @param rpf the factory used to create the combined properties; when
     *            null, a {@link DefaultPointRenderProperties} is created
     * @return the pooled point render properties
     */
    public PointRenderProperties getPointSizeRenderPropertiesIfAvailable(MapVisualizationInfo mapVisInfo,
            BasicVisualizationInfo basicVisInfo, float size, FeatureIndividualGeometryBuilderData bd, Color color,
            PointRenderPropertyFactory rpf)
    {
        PointSizeRenderProperty pointSizeRP = getPointSizeRenderPropertiesIfAvailable(myRenderPropertyPool, size);
        BaseAltitudeRenderProperties brp = getBaseRenderPropertiesIfAvailable(myRenderPropertyPool, bd, basicVisInfo, mapVisInfo,
                bd.getVS().isSelected(), color);
        PointRenderProperties props = rpf == null ? new DefaultPointRenderProperties(brp, pointSizeRP)
                : rpf.createPointRenderProperties(brp, pointSizeRP);
        props.setColor(color);
        // Deduplicate the combined instance as well, not just its parts.
        props = myRenderPropertyPool.getPoolInstance(props);
        return props;
    }

    /**
     * Get the pooled base render properties or create one to put in the pool.
     * Selected features get the maximum z-order of the feature category so
     * they draw on top; otherwise the type's own z-order is used when known.
     *
     * @param renderPropertyPool the render property pool
     * @param bd the {@link FeatureIndividualGeometryBuilderData}
     * @param basicVisInfo Basic information for the data type.
     * @param mapVisInfo Data type level info relevant for rendering.
     * @param isSelected true when the geometry is selected
     * @param color the geometry color
     * @return The base render properties.
     */
    private BaseAltitudeRenderProperties getBaseRenderPropertiesIfAvailable(RenderPropertyPool renderPropertyPool,
            FeatureIndividualGeometryBuilderData bd, BasicVisualizationInfo basicVisInfo, MapVisualizationInfo mapVisInfo,
            boolean isSelected, Color color)
    {
        int zOrder = bd.getVS().isSelected() ? DefaultOrderCategory.FEATURE_CATEGORY.getOrderRange().getMaximum().intValue()
                : mapVisInfo == null ? DefaultOrderCategory.FEATURE_CATEGORY.getOrderRange().getMinimum().intValue()
                        : mapVisInfo.getZOrder();
        boolean pickable = basicVisInfo != null && basicVisInfo.getLoadsTo().isPickable();
        DefaultBaseAltitudeRenderProperties baseProps = new DefaultBaseAltitudeRenderProperties(zOrder, true, pickable, false);
        baseProps.setColor(color);
        // Selected geometries render after (on top of) unselected ones.
        baseProps.setRenderingOrder(isSelected ? 1 : 0);
        return renderPropertyPool.getPoolInstance(baseProps);
    }

    /**
     * Gets the point size render properties if available, if not creates a new
     * one and adds it to the share with the provided size (used for both the
     * normal and the highlight size).
     *
     * @param renderPropertyPool the render property pool
     * @param size the default point size
     * @return the point size render properties if available
     */
    private PointSizeRenderProperty getPointSizeRenderPropertiesIfAvailable(RenderPropertyPool renderPropertyPool, float size)
    {
        PointSizeRenderProperty pointSizeRP = new DefaultPointSizeRenderProperty();
        pointSizeRP.setSize(size);
        pointSizeRP.setHighlightSize(size);
        pointSizeRP = renderPropertyPool.getPoolInstance(pointSizeRP);
        return pointSizeRP;
    }
}
|
package v1.event;
import akka.actor.ActorSystem;
import play.libs.concurrent.CustomExecutionContext;
import javax.inject.Inject;
/**
 * Custom execution context wired to the "event.repository" dispatcher of the
 * actor system, so event-repository work runs off the default thread pool.
 */
public class EventExecutionContext extends CustomExecutionContext {

    /**
     * Constructor (dependency-injected).
     *
     * @param actorSystem the actor system whose "event.repository" dispatcher
     *            backs this context
     */
    @Inject
    public EventExecutionContext(ActorSystem actorSystem) {
        super(actorSystem, "event.repository");
    }
}
|
<filename>jhiRoot/plantsMS/src/main/java/fr/syncrase/ecosyst/service/criteria/ClassificationCronquistCriteria.java<gh_stars>1-10
package fr.syncrase.ecosyst.service.criteria;
import java.io.Serializable;
import java.util.Objects;
import tech.jhipster.service.Criteria;
import tech.jhipster.service.filter.BooleanFilter;
import tech.jhipster.service.filter.DoubleFilter;
import tech.jhipster.service.filter.Filter;
import tech.jhipster.service.filter.FloatFilter;
import tech.jhipster.service.filter.IntegerFilter;
import tech.jhipster.service.filter.LongFilter;
import tech.jhipster.service.filter.StringFilter;
/**
 * Criteria class for the {@link fr.syncrase.ecosyst.domain.ClassificationCronquist} entity. This class is used
 * in {@link fr.syncrase.ecosyst.web.rest.ClassificationCronquistResource} to receive all the possible filtering options from
 * the Http GET request parameters.
 * For example the following could be a valid request:
 * {@code /classification-cronquists?id.greaterThan=5&attr1.contains=something&attr2.specified=false}
 * As Spring is unable to properly convert the types, unless specific {@link Filter} class are used, we need to use
 * fix type specific filters.
 *
 * <p>Generated (JHipster) boilerplate: one filter field per Cronquist
 * taxonomic rank, each with a getter, a lazily-initializing fluent accessor
 * and a setter.</p>
 */
public class ClassificationCronquistCriteria implements Serializable, Criteria {

    private static final long serialVersionUID = 1L;

    // One filter per Cronquist rank, from super-kingdom down to form, plus
    // the related plante id and the "distinct" query flag.
    private LongFilter id;

    private StringFilter superRegne;

    private StringFilter regne;

    private StringFilter sousRegne;

    private StringFilter rameau;

    private StringFilter infraRegne;

    private StringFilter superEmbranchement;

    private StringFilter division;

    private StringFilter sousEmbranchement;

    private StringFilter infraEmbranchement;

    private StringFilter microEmbranchement;

    private StringFilter superClasse;

    private StringFilter classe;

    private StringFilter sousClasse;

    private StringFilter infraClasse;

    private StringFilter superOrdre;

    private StringFilter ordre;

    private StringFilter sousOrdre;

    private StringFilter infraOrdre;

    private StringFilter microOrdre;

    private StringFilter superFamille;

    private StringFilter famille;

    private StringFilter sousFamille;

    private StringFilter tribu;

    private StringFilter sousTribu;

    private StringFilter genre;

    private StringFilter sousGenre;

    private StringFilter section;

    private StringFilter sousSection;

    private StringFilter espece;

    private StringFilter sousEspece;

    private StringFilter variete;

    private StringFilter sousVariete;

    private StringFilter forme;

    private LongFilter planteId;

    private Boolean distinct;

    public ClassificationCronquistCriteria() {}

    /**
     * Copy constructor: deep-copies every non-null filter so the copy can be
     * mutated independently of {@code other}.
     *
     * @param other the criteria to copy
     */
    public ClassificationCronquistCriteria(ClassificationCronquistCriteria other) {
        this.id = other.id == null ? null : other.id.copy();
        this.superRegne = other.superRegne == null ? null : other.superRegne.copy();
        this.regne = other.regne == null ? null : other.regne.copy();
        this.sousRegne = other.sousRegne == null ? null : other.sousRegne.copy();
        this.rameau = other.rameau == null ? null : other.rameau.copy();
        this.infraRegne = other.infraRegne == null ? null : other.infraRegne.copy();
        this.superEmbranchement = other.superEmbranchement == null ? null : other.superEmbranchement.copy();
        this.division = other.division == null ? null : other.division.copy();
        this.sousEmbranchement = other.sousEmbranchement == null ? null : other.sousEmbranchement.copy();
        this.infraEmbranchement = other.infraEmbranchement == null ? null : other.infraEmbranchement.copy();
        this.microEmbranchement = other.microEmbranchement == null ? null : other.microEmbranchement.copy();
        this.superClasse = other.superClasse == null ? null : other.superClasse.copy();
        this.classe = other.classe == null ? null : other.classe.copy();
        this.sousClasse = other.sousClasse == null ? null : other.sousClasse.copy();
        this.infraClasse = other.infraClasse == null ? null : other.infraClasse.copy();
        this.superOrdre = other.superOrdre == null ? null : other.superOrdre.copy();
        this.ordre = other.ordre == null ? null : other.ordre.copy();
        this.sousOrdre = other.sousOrdre == null ? null : other.sousOrdre.copy();
        this.infraOrdre = other.infraOrdre == null ? null : other.infraOrdre.copy();
        this.microOrdre = other.microOrdre == null ? null : other.microOrdre.copy();
        this.superFamille = other.superFamille == null ? null : other.superFamille.copy();
        this.famille = other.famille == null ? null : other.famille.copy();
        this.sousFamille = other.sousFamille == null ? null : other.sousFamille.copy();
        this.tribu = other.tribu == null ? null : other.tribu.copy();
        this.sousTribu = other.sousTribu == null ? null : other.sousTribu.copy();
        this.genre = other.genre == null ? null : other.genre.copy();
        this.sousGenre = other.sousGenre == null ? null : other.sousGenre.copy();
        this.section = other.section == null ? null : other.section.copy();
        this.sousSection = other.sousSection == null ? null : other.sousSection.copy();
        this.espece = other.espece == null ? null : other.espece.copy();
        this.sousEspece = other.sousEspece == null ? null : other.sousEspece.copy();
        this.variete = other.variete == null ? null : other.variete.copy();
        this.sousVariete = other.sousVariete == null ? null : other.sousVariete.copy();
        this.forme = other.forme == null ? null : other.forme.copy();
        this.planteId = other.planteId == null ? null : other.planteId.copy();
        this.distinct = other.distinct;
    }

    @Override
    public ClassificationCronquistCriteria copy() {
        return new ClassificationCronquistCriteria(this);
    }

    // Accessor pattern for every field below: getX() returns the current
    // filter (possibly null); the fluent x() lazily creates the filter so
    // callers can chain conditions; setX() replaces it.

    public LongFilter getId() {
        return id;
    }

    public LongFilter id() {
        if (id == null) {
            id = new LongFilter();
        }
        return id;
    }

    public void setId(LongFilter id) {
        this.id = id;
    }

    public StringFilter getSuperRegne() {
        return superRegne;
    }

    public StringFilter superRegne() {
        if (superRegne == null) {
            superRegne = new StringFilter();
        }
        return superRegne;
    }

    public void setSuperRegne(StringFilter superRegne) {
        this.superRegne = superRegne;
    }

    public StringFilter getRegne() {
        return regne;
    }

    public StringFilter regne() {
        if (regne == null) {
            regne = new StringFilter();
        }
        return regne;
    }

    public void setRegne(StringFilter regne) {
        this.regne = regne;
    }

    public StringFilter getSousRegne() {
        return sousRegne;
    }

    public StringFilter sousRegne() {
        if (sousRegne == null) {
            sousRegne = new StringFilter();
        }
        return sousRegne;
    }

    public void setSousRegne(StringFilter sousRegne) {
        this.sousRegne = sousRegne;
    }

    public StringFilter getRameau() {
        return rameau;
    }

    public StringFilter rameau() {
        if (rameau == null) {
            rameau = new StringFilter();
        }
        return rameau;
    }

    public void setRameau(StringFilter rameau) {
        this.rameau = rameau;
    }

    public StringFilter getInfraRegne() {
        return infraRegne;
    }

    public StringFilter infraRegne() {
        if (infraRegne == null) {
            infraRegne = new StringFilter();
        }
        return infraRegne;
    }

    public void setInfraRegne(StringFilter infraRegne) {
        this.infraRegne = infraRegne;
    }

    public StringFilter getSuperEmbranchement() {
        return superEmbranchement;
    }

    public StringFilter superEmbranchement() {
        if (superEmbranchement == null) {
            superEmbranchement = new StringFilter();
        }
        return superEmbranchement;
    }

    public void setSuperEmbranchement(StringFilter superEmbranchement) {
        this.superEmbranchement = superEmbranchement;
    }

    public StringFilter getDivision() {
        return division;
    }

    public StringFilter division() {
        if (division == null) {
            division = new StringFilter();
        }
        return division;
    }

    public void setDivision(StringFilter division) {
        this.division = division;
    }

    public StringFilter getSousEmbranchement() {
        return sousEmbranchement;
    }

    public StringFilter sousEmbranchement() {
        if (sousEmbranchement == null) {
            sousEmbranchement = new StringFilter();
        }
        return sousEmbranchement;
    }

    public void setSousEmbranchement(StringFilter sousEmbranchement) {
        this.sousEmbranchement = sousEmbranchement;
    }

    public StringFilter getInfraEmbranchement() {
        return infraEmbranchement;
    }

    public StringFilter infraEmbranchement() {
        if (infraEmbranchement == null) {
            infraEmbranchement = new StringFilter();
        }
        return infraEmbranchement;
    }

    public void setInfraEmbranchement(StringFilter infraEmbranchement) {
        this.infraEmbranchement = infraEmbranchement;
    }

    public StringFilter getMicroEmbranchement() {
        return microEmbranchement;
    }

    public StringFilter microEmbranchement() {
        if (microEmbranchement == null) {
            microEmbranchement = new StringFilter();
        }
        return microEmbranchement;
    }

    public void setMicroEmbranchement(StringFilter microEmbranchement) {
        this.microEmbranchement = microEmbranchement;
    }

    public StringFilter getSuperClasse() {
        return superClasse;
    }

    public StringFilter superClasse() {
        if (superClasse == null) {
            superClasse = new StringFilter();
        }
        return superClasse;
    }

    public void setSuperClasse(StringFilter superClasse) {
        this.superClasse = superClasse;
    }

    public StringFilter getClasse() {
        return classe;
    }

    public StringFilter classe() {
        if (classe == null) {
            classe = new StringFilter();
        }
        return classe;
    }

    public void setClasse(StringFilter classe) {
        this.classe = classe;
    }

    public StringFilter getSousClasse() {
        return sousClasse;
    }

    public StringFilter sousClasse() {
        if (sousClasse == null) {
            sousClasse = new StringFilter();
        }
        return sousClasse;
    }

    public void setSousClasse(StringFilter sousClasse) {
        this.sousClasse = sousClasse;
    }

    public StringFilter getInfraClasse() {
        return infraClasse;
    }

    public StringFilter infraClasse() {
        if (infraClasse == null) {
            infraClasse = new StringFilter();
        }
        return infraClasse;
    }

    public void setInfraClasse(StringFilter infraClasse) {
        this.infraClasse = infraClasse;
    }

    public StringFilter getSuperOrdre() {
        return superOrdre;
    }

    public StringFilter superOrdre() {
        if (superOrdre == null) {
            superOrdre = new StringFilter();
        }
        return superOrdre;
    }

    public void setSuperOrdre(StringFilter superOrdre) {
        this.superOrdre = superOrdre;
    }

    public StringFilter getOrdre() {
        return ordre;
    }

    public StringFilter ordre() {
        if (ordre == null) {
            ordre = new StringFilter();
        }
        return ordre;
    }

    public void setOrdre(StringFilter ordre) {
        this.ordre = ordre;
    }

    public StringFilter getSousOrdre() {
        return sousOrdre;
    }

    public StringFilter sousOrdre() {
        if (sousOrdre == null) {
            sousOrdre = new StringFilter();
        }
        return sousOrdre;
    }

    public void setSousOrdre(StringFilter sousOrdre) {
        this.sousOrdre = sousOrdre;
    }

    public StringFilter getInfraOrdre() {
        return infraOrdre;
    }

    public StringFilter infraOrdre() {
        if (infraOrdre == null) {
            infraOrdre = new StringFilter();
        }
        return infraOrdre;
    }

    public void setInfraOrdre(StringFilter infraOrdre) {
        this.infraOrdre = infraOrdre;
    }

    public StringFilter getMicroOrdre() {
        return microOrdre;
    }

    public StringFilter microOrdre() {
        if (microOrdre == null) {
            microOrdre = new StringFilter();
        }
        return microOrdre;
    }

    public void setMicroOrdre(StringFilter microOrdre) {
        this.microOrdre = microOrdre;
    }

    public StringFilter getSuperFamille() {
        return superFamille;
    }

    public StringFilter superFamille() {
        if (superFamille == null) {
            superFamille = new StringFilter();
        }
        return superFamille;
    }

    public void setSuperFamille(StringFilter superFamille) {
        this.superFamille = superFamille;
    }

    public StringFilter getFamille() {
        return famille;
    }

    public StringFilter famille() {
        if (famille == null) {
            famille = new StringFilter();
        }
        return famille;
    }

    public void setFamille(StringFilter famille) {
        this.famille = famille;
    }

    public StringFilter getSousFamille() {
        return sousFamille;
    }

    public StringFilter sousFamille() {
        if (sousFamille == null) {
            sousFamille = new StringFilter();
        }
        return sousFamille;
    }

    public void setSousFamille(StringFilter sousFamille) {
        this.sousFamille = sousFamille;
    }

    public StringFilter getTribu() {
        return tribu;
    }

    public StringFilter tribu() {
        if (tribu == null) {
            tribu = new StringFilter();
        }
        return tribu;
    }

    public void setTribu(StringFilter tribu) {
        this.tribu = tribu;
    }

    public StringFilter getSousTribu() {
        return sousTribu;
    }

    public StringFilter sousTribu() {
        if (sousTribu == null) {
            sousTribu = new StringFilter();
        }
        return sousTribu;
    }

    public void setSousTribu(StringFilter sousTribu) {
        this.sousTribu = sousTribu;
    }

    public StringFilter getGenre() {
        return genre;
    }

    public StringFilter genre() {
        if (genre == null) {
            genre = new StringFilter();
        }
        return genre;
    }

    public void setGenre(StringFilter genre) {
        this.genre = genre;
    }

    public StringFilter getSousGenre() {
        return sousGenre;
    }

    public StringFilter sousGenre() {
        if (sousGenre == null) {
            sousGenre = new StringFilter();
        }
        return sousGenre;
    }

    public void setSousGenre(StringFilter sousGenre) {
        this.sousGenre = sousGenre;
    }

    public StringFilter getSection() {
        return section;
    }

    public StringFilter section() {
        if (section == null) {
            section = new StringFilter();
        }
        return section;
    }

    public void setSection(StringFilter section) {
        this.section = section;
    }

    public StringFilter getSousSection() {
        return sousSection;
    }

    public StringFilter sousSection() {
        if (sousSection == null) {
            sousSection = new StringFilter();
        }
        return sousSection;
    }

    public void setSousSection(StringFilter sousSection) {
        this.sousSection = sousSection;
    }

    public StringFilter getEspece() {
        return espece;
    }

    public StringFilter espece() {
        if (espece == null) {
            espece = new StringFilter();
        }
        return espece;
    }

    public void setEspece(StringFilter espece) {
        this.espece = espece;
    }

    public StringFilter getSousEspece() {
        return sousEspece;
    }

    public StringFilter sousEspece() {
        if (sousEspece == null) {
            sousEspece = new StringFilter();
        }
        return sousEspece;
    }

    public void setSousEspece(StringFilter sousEspece) {
        this.sousEspece = sousEspece;
    }

    public StringFilter getVariete() {
        return variete;
    }

    public StringFilter variete() {
        if (variete == null) {
            variete = new StringFilter();
        }
        return variete;
    }

    public void setVariete(StringFilter variete) {
        this.variete = variete;
    }

    public StringFilter getSousVariete() {
        return sousVariete;
    }

    public StringFilter sousVariete() {
        if (sousVariete == null) {
            sousVariete = new StringFilter();
        }
        return sousVariete;
    }

    public void setSousVariete(StringFilter sousVariete) {
        this.sousVariete = sousVariete;
    }

    public StringFilter getForme() {
        return forme;
    }

    public StringFilter forme() {
        if (forme == null) {
            forme = new StringFilter();
        }
        return forme;
    }

    public void setForme(StringFilter forme) {
        this.forme = forme;
    }

    public LongFilter getPlanteId() {
        return planteId;
    }

    public LongFilter planteId() {
        if (planteId == null) {
            planteId = new LongFilter();
        }
        return planteId;
    }

    public void setPlanteId(LongFilter planteId) {
        this.planteId = planteId;
    }

    public Boolean getDistinct() {
        return distinct;
    }

    public void setDistinct(Boolean distinct) {
        this.distinct = distinct;
    }

    // equals/hashCode/toString compare and render every filter field in
    // declaration order; keep them in sync when adding a field.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final ClassificationCronquistCriteria that = (ClassificationCronquistCriteria) o;
        return (
            Objects.equals(id, that.id) &&
            Objects.equals(superRegne, that.superRegne) &&
            Objects.equals(regne, that.regne) &&
            Objects.equals(sousRegne, that.sousRegne) &&
            Objects.equals(rameau, that.rameau) &&
            Objects.equals(infraRegne, that.infraRegne) &&
            Objects.equals(superEmbranchement, that.superEmbranchement) &&
            Objects.equals(division, that.division) &&
            Objects.equals(sousEmbranchement, that.sousEmbranchement) &&
            Objects.equals(infraEmbranchement, that.infraEmbranchement) &&
            Objects.equals(microEmbranchement, that.microEmbranchement) &&
            Objects.equals(superClasse, that.superClasse) &&
            Objects.equals(classe, that.classe) &&
            Objects.equals(sousClasse, that.sousClasse) &&
            Objects.equals(infraClasse, that.infraClasse) &&
            Objects.equals(superOrdre, that.superOrdre) &&
            Objects.equals(ordre, that.ordre) &&
            Objects.equals(sousOrdre, that.sousOrdre) &&
            Objects.equals(infraOrdre, that.infraOrdre) &&
            Objects.equals(microOrdre, that.microOrdre) &&
            Objects.equals(superFamille, that.superFamille) &&
            Objects.equals(famille, that.famille) &&
            Objects.equals(sousFamille, that.sousFamille) &&
            Objects.equals(tribu, that.tribu) &&
            Objects.equals(sousTribu, that.sousTribu) &&
            Objects.equals(genre, that.genre) &&
            Objects.equals(sousGenre, that.sousGenre) &&
            Objects.equals(section, that.section) &&
            Objects.equals(sousSection, that.sousSection) &&
            Objects.equals(espece, that.espece) &&
            Objects.equals(sousEspece, that.sousEspece) &&
            Objects.equals(variete, that.variete) &&
            Objects.equals(sousVariete, that.sousVariete) &&
            Objects.equals(forme, that.forme) &&
            Objects.equals(planteId, that.planteId) &&
            Objects.equals(distinct, that.distinct)
        );
    }

    @Override
    public int hashCode() {
        return Objects.hash(
            id,
            superRegne,
            regne,
            sousRegne,
            rameau,
            infraRegne,
            superEmbranchement,
            division,
            sousEmbranchement,
            infraEmbranchement,
            microEmbranchement,
            superClasse,
            classe,
            sousClasse,
            infraClasse,
            superOrdre,
            ordre,
            sousOrdre,
            infraOrdre,
            microOrdre,
            superFamille,
            famille,
            sousFamille,
            tribu,
            sousTribu,
            genre,
            sousGenre,
            section,
            sousSection,
            espece,
            sousEspece,
            variete,
            sousVariete,
            forme,
            planteId,
            distinct
        );
    }

    // prettier-ignore
    @Override
    public String toString() {
        return "ClassificationCronquistCriteria{" +
            (id != null ? "id=" + id + ", " : "") +
            (superRegne != null ? "superRegne=" + superRegne + ", " : "") +
            (regne != null ? "regne=" + regne + ", " : "") +
            (sousRegne != null ? "sousRegne=" + sousRegne + ", " : "") +
            (rameau != null ? "rameau=" + rameau + ", " : "") +
            (infraRegne != null ? "infraRegne=" + infraRegne + ", " : "") +
            (superEmbranchement != null ? "superEmbranchement=" + superEmbranchement + ", " : "") +
            (division != null ? "division=" + division + ", " : "") +
            (sousEmbranchement != null ? "sousEmbranchement=" + sousEmbranchement + ", " : "") +
            (infraEmbranchement != null ? "infraEmbranchement=" + infraEmbranchement + ", " : "") +
            (microEmbranchement != null ? "microEmbranchement=" + microEmbranchement + ", " : "") +
            (superClasse != null ? "superClasse=" + superClasse + ", " : "") +
            (classe != null ? "classe=" + classe + ", " : "") +
            (sousClasse != null ? "sousClasse=" + sousClasse + ", " : "") +
            (infraClasse != null ? "infraClasse=" + infraClasse + ", " : "") +
            (superOrdre != null ? "superOrdre=" + superOrdre + ", " : "") +
            (ordre != null ? "ordre=" + ordre + ", " : "") +
            (sousOrdre != null ? "sousOrdre=" + sousOrdre + ", " : "") +
            (infraOrdre != null ? "infraOrdre=" + infraOrdre + ", " : "") +
            (microOrdre != null ? "microOrdre=" + microOrdre + ", " : "") +
            (superFamille != null ? "superFamille=" + superFamille + ", " : "") +
            (famille != null ? "famille=" + famille + ", " : "") +
            (sousFamille != null ? "sousFamille=" + sousFamille + ", " : "") +
            (tribu != null ? "tribu=" + tribu + ", " : "") +
            (sousTribu != null ? "sousTribu=" + sousTribu + ", " : "") +
            (genre != null ? "genre=" + genre + ", " : "") +
            (sousGenre != null ? "sousGenre=" + sousGenre + ", " : "") +
            (section != null ? "section=" + section + ", " : "") +
            (sousSection != null ? "sousSection=" + sousSection + ", " : "") +
            (espece != null ? "espece=" + espece + ", " : "") +
            (sousEspece != null ? "sousEspece=" + sousEspece + ", " : "") +
            (variete != null ? "variete=" + variete + ", " : "") +
            (sousVariete != null ? "sousVariete=" + sousVariete + ", " : "") +
            (forme != null ? "forme=" + forme + ", " : "") +
            (planteId != null ? "planteId=" + planteId + ", " : "") +
            (distinct != null ? "distinct=" + distinct + ", " : "") +
            "}";
    }
}
|
#!/usr/bin/env bash
# Container entrypoint: seed the config from the bundled sample on first run,
# then start the application.
configfile=/usr/src/app/config/AnovaMaster.cfg
if [ ! -f "$configfile" ]; then
  cp /usr/src/app/AnovaMaster.cfg.sample "$configfile"
fi
# exec replaces the shell so python receives signals (e.g. SIGTERM) directly
# when this script runs as the container's PID 1.
exec python run.py
|
package cyclops.container.chain;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import cyclops.container.traversable.IterableX;
import cyclops.container.control.Option;
import cyclops.container.immutable.impl.Chain;
import cyclops.container.immutable.ImmutableList;
import cyclops.container.immutable.impl.Seq;
import cyclops.container.basetests.BaseImmutableListTest;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.reactive.ReactiveSeq;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import org.junit.Test;
/**
 * Runs {@link BaseImmutableListTest} against {@link Chain}, where every
 * fixture is built by appending elements one at a time (exercising the
 * append path of Chain).
 */
public class AppendTest extends BaseImmutableListTest {

    /** Build a Chain by appending each element of the stream in order. */
    @Override
    protected <T> ImmutableList<T> fromStream(Stream<T> s) {
        Chain<T> res = Chain.empty();
        for (T next : ReactiveSeq.fromStream(s)) {
            res = res.append(next);
        }
        return res;
    }

    /** Build a Chain by appending the given values in order. */
    @Override
    public <T> ImmutableList<T> of(T... values) {
        Chain<T> res = Chain.empty();
        for (T next : values) {
            res = res.append(next);
        }
        return res;
    }

    /** Build a Chain of the ints in [start, end) via append. */
    @Override
    public IterableX<Integer> range(int start,
                                    int end) {
        Chain<Integer> res = Chain.empty();
        for (Integer next : ReactiveSeq.range(start,
                                              end)) {
            res = res.append(next);
        }
        return res;
    }

    /** Build a Chain of the longs in [start, end) via append. */
    @Override
    public IterableX<Long> rangeLong(long start,
                                     long end) {
        Chain<Long> res = Chain.empty();
        for (Long next : ReactiveSeq.rangeLong(start,
                                               end)) {
            res = res.append(next);
        }
        return res;
    }

    /** Build a Chain of `times` iterated values seed, fn(seed), ... via append. */
    @Override
    public <T> IterableX<T> iterate(int times,
                                    T seed,
                                    UnaryOperator<T> fn) {
        Chain<T> res = Chain.empty();
        for (T next : ReactiveSeq.<T>iterate(seed,
                                             fn).take(times)) {
            res = res.append(next);
        }
        return res;
    }

    /** Build a Chain of `times` supplier-generated values via append. */
    @Override
    public <T> IterableX<T> generate(int times,
                                     Supplier<T> fn) {
        Chain<T> res = Chain.empty();
        for (T next : ReactiveSeq.<T>generate(fn).take(times)) {
            res = res.append(next);
        }
        return res;
    }

    /** Build a Chain by unfolding from a seed via append. */
    @Override
    public <U, T> IterableX<T> unfold(U seed,
                                      Function<? super U, Option<Tuple2<T, U>>> unfolder) {
        Chain<T> res = Chain.empty();
        for (T next : ReactiveSeq.unfold(seed,
                                         unfolder)) {
            res = res.append(next);
        }
        return res;
    }

    @Override
    public <T> ImmutableList<T> empty() {
        return Chain.empty();
    }

    @Test
    public void prependAllTests() {
        // Varargs overload: prepended elements appear, in order, before the
        // original contents.
        assertThat(of(1,
                      2,
                      3,
                      4,
                      5,
                      6,
                      7).prependAll(10,
                                    11,
                                    12),
                   equalTo(of(10,
                              11,
                              12,
                              1,
                              2,
                              3,
                              4,
                              5,
                              6,
                              7)));
        // Iterable overload behaves identically.
        assertThat(of(1,
                      2,
                      3,
                      4,
                      5,
                      6,
                      7).prependAll(Seq.of(10,
                                           11,
                                           12)),
                   equalTo(of(10,
                              11,
                              12,
                              1,
                              2,
                              3,
                              4,
                              5,
                              6,
                              7)));
    }

    // The inherited cycle tests below are overridden with empty bodies,
    // which skips them for Chain.
    @Override
    public void testCycleWhile() {
    }

    @Override
    public void testCycleUntil() {
    }

    @Override
    public void testCycleWhileNoOrd() {
    }

    @Override
    public void testCycleUntilNoOrd() {
    }
}
|
#include <cxxabi.h>
#include <cstdint>
#include <memory>
#include <vector>
#include <cstdlib>
#include <algorithm>
#include <set>
#include <string>
#include <tuple>
#include <utility>
#include <functional>
#include "capstone/capstone.h"
#include "gflags/gflags.h"
#include "glog/logging.h"
#include "llvm/Object/ObjectFile.h"
#include "Cfg.hpp"
#include "CfgRes.hpp"
#include "MemRange.hpp"
#include "MemBitMap.hpp"
#include "AbiOracle.hpp"
#include "Utils.hpp"
#include "CapstoneHelper.hpp"
#include "Block.hpp"
#include "CpuState.hpp"
#include "PerfDefs.hpp"
#include "EventManager.hpp"
#include "MemoryMap.hpp"
#include "SymResolver.hpp"
// Maximum number of bytes a single recovered basic block may span.
#define MAX_BLOCK_SIZE 0x1000

// Command-line flags (gflags) toggling optional analysis output/passes.
DEFINE_bool(printholes, false, "Pretty prints the current holes in the text section after main analysis, before linear sweep");
DEFINE_bool(lin_sweep, true, "Toggle linear sweeps over the binary looking for funcs");
// Construct a CFG builder over `obj`.  The resolver, memory map and event
// manager are shared with the caller; counters start at zero and the bitmap
// (instruction coverage) is freshly allocated.
// NOTE(review): m_cs_handle is NOT initialized here -- presumably opened in
// create_cfg; confirm the object is not destroyed before that happens.
Cfg::Cfg(const ObjectFile *obj, std::shared_ptr<SymResolver> resolver, std::shared_ptr<MemoryMap> memmap, std::shared_ptr<EventManager> events) :
    m_obj(obj),
    m_resolver(std::move(resolver)),
    m_memmap(std::move(memmap)),
    m_ep(0),
    m_switches_found(0),
    m_sweep_funcs_found(0),
    m_bitmap(new MemBitMap()),
    m_bin_type(get_bin_type(obj)),
    m_state(nullptr),
    m_events(std::move(events)) {}
// Release the Capstone handle and the owned CPU state.
// NOTE(review): cs_close assumes m_cs_handle was opened elsewhere
// (presumably in create_cfg); closing a never-opened handle would be
// undefined -- confirm the lifecycle guarantees this.
Cfg::~Cfg() {
    cs_close(&m_cs_handle);
    delete m_state;  // raw-owned; nullptr-safe delete
}
// Read-only view of the recovered block map, keyed by block address range.
const std::map<block_range, Block, block_cmp> *Cfg::get_cfg_map() const {
    return &m_blocks;
}
// Number of switch (jump-table) constructs recovered during analysis.
// NOTE(review): the top-level `const` on the return type is meaningless for
// callers; kept as-is to match the declaration in the header.
const uint64_t Cfg::get_switch_count() const {
    return m_switches_found;
}
// Number of functions discovered by the linear-sweep pass.
// NOTE(review): top-level `const` on the return type has no effect; kept to
// match the header declaration.
const uint64_t Cfg::get_sweep_count() const {
    return m_sweep_funcs_found;
}
// Disassemble and log every basic block in the CFG: its instructions, its
// leaders/followers, callers (for function heads), and branch/function
// metadata.  Uses a private Capstone handle so CS_OPT_MODE can be switched
// per block without disturbing the member handle.
void Cfg::print_cfg() {
    csh cs_handle;
    unsigned int obj_arch = m_obj->getArch();
    std::tuple<cs_arch, cs_mode> arch_tup = map_triple_cs(obj_arch);
    m_arch = std::get<0>(arch_tup);
    cs_mode mode = std::get<1>(arch_tup);
    cs_err err = cs_open(m_arch, mode, &cs_handle);
    if (err != CS_ERR_OK) {
        LOG(ERROR) << "cs_open: " << cs_strerror(err);
        return;
    }
    cs_option(cs_handle, CS_OPT_DETAIL, CS_OPT_ON);
    for (const auto &kv : m_blocks) {
        // Take a const reference instead of copying the Block per iteration.
        const Block &block = kv.second;
        uint64_t block_size = block.end - block.start;
        uint64_t block_addr = block.start;
        LOG(INFO) << std::endl << "Block: 0x" << std::hex << block_addr << " size: 0x" << block_size;
        cs_insn *insn = cs_malloc(cs_handle);
        const uint8_t *data_ptr = block.data;
        // Each block carries its own decode mode (e.g. ARM vs Thumb).
        cs_option(cs_handle, CS_OPT_MODE, block.mode);
        // cs_disasm_iter advances data_ptr/block_size/block_addr in place.
        // (Fix: dropped the unused `cs_insn cur_insn = *insn;` copy that was
        // made on every iteration.)
        while (cs_disasm_iter(cs_handle, &data_ptr, &block_size, &block_addr, insn)) {
            LOG(INFO) << "0x" << std::hex << insn->address << ": " << insn->mnemonic << " " << insn->op_str;
        }
        cs_free(insn, 1);
        LOG(INFO) << "Leaders:";
        for (const auto &leader : block.leaders) {
            LOG(INFO) << " 0x" << std::hex << leader;
        }
        LOG(INFO) << "Followers:";
        for (const auto &follower : block.followers) {
            LOG(INFO) << " 0x" << std::hex << follower;
        }
        if (block.is_func_head) {
            LOG(INFO) << "Callers:";
            for (const auto &follower : block.callers) {
                LOG(INFO) << " 0x" << std::hex << follower;
            }
        }
        if (block.branch_target) {
            LOG(INFO) << "Branch target: 0x" << std::hex << block.branch_target;
        }
        LOG(INFO) << "Call target: " << block.is_func_head;
        LOG(INFO) << "Function addr: 0x" << std::hex << block.func_addr;
        LOG(INFO) << "Splits insn: " << block.splits_insn;
    }
    cs_close(&cs_handle);
}
void Cfg::print_holes() {
std::vector<MemRange> holes = this->m_bitmap->get_unset_ranges(m_memmap.get(), MapFlag::ANY);
LOG(INFO) << "TEXT holes:";
for (const auto &hole : holes) {
LOG(INFO) << " " << "0x" << std::hex << hole.addr << " : 0x" << hole.addr + hole.size << " | size: 0x" << hole.size;
}
}
// Drives the whole CFG build:
//   1. seed the coverage bitmap with every backed text page,
//   2. queue the entry point (if given) and all exported FUNC symbols,
//   3. drain the function queue,
//   4. queue resolver-"found" functions (e.g. CFI / eh_frame) and drain again,
//   5. optionally print coverage holes and run a linear sweep,
//   6. post-process blocks and drop the bitmap.
// Returns 0 on success, 1 if the Capstone handle could not be opened.
int Cfg::create_cfg(uint64_t ep) {
    unsigned int obj_arch = m_obj->getArch();
    std::tuple<cs_arch, cs_mode> arch_tup = map_triple_cs(obj_arch);
    m_arch = std::get<0>(arch_tup);
    cs_mode mode = std::get<1>(arch_tup);
    this->populate_func_tables();
    for (const auto &page : m_memmap->get_text_pages()) {
        // Skip pages with no backing data.
        if (!page.second) {
            continue;
        }
        m_bitmap->add_map(page.first, page.second);
    }
    m_state = new CpuState(m_arch, m_memmap);
    // NOTE(review): plain `new` throws on failure rather than returning null,
    // so this check is effectively dead.
    if (!m_state) {
        LOG(FATAL) << "Failed to create CpuState object";
    }
    cs_err err;
    err = cs_open(m_arch, mode, &m_cs_handle);
    if (err != CS_ERR_OK) {
        LOG(ERROR) << "cs_open: " << cs_strerror(err);
        return 1;
    }
    cs_option(m_cs_handle, CS_OPT_DETAIL, CS_OPT_ON);
    // Queue the entry point
    if (ep) {
        do {
            auto *ep_page = m_memmap->addr_to_page(ep);
            if (!ep_page) {
                LOG(WARNING) << "Invalid ep addr: 0x" << std::hex << ep;
                break;
            }
            if (ep_page->empty_page) {
                LOG(WARNING) << "Skipping ep addr due to virtual page: 0x" << std::hex << ep;
                break;
            }
            m_ep = ep;
            Block ep_block(m_ep);
            ep_block.mode = mode;
            if (m_arch == cs_arch::CS_ARCH_ARM) {
                // An odd ARM address encodes Thumb mode; strip the low bit.
                if (ep_block.start % 2 != 0) {
                    ep_block.mode = cs_mode::CS_MODE_THUMB;
                    ep_block.start -= 1;
                    m_ep = ep_block.start;
                }
            }
            m_func_queue.push(ep_block);
            VLOG(VLOG_CFG) << "Pushing ep: 0x" << std::hex << ep_block.start;
        } while(false);
    }
    // Queue all exported symbols
    for (const auto &kv : m_resolver->get_syms_by_addr()) {
        Symbol sym = kv.second;
        if (sym.type != sym_type::EXPORT || sym.obj_type != sym_obj_type::FUNC) {
            continue;
        }
        if(!m_memmap->is_text_sec(kv.first)) {
            VLOG(VLOG_CFG) << "Skipping symbol function 0x" << std::hex << kv.first << " because its not a TEXT section";
            continue;
        }
        auto *sym_page = m_memmap->addr_to_page(kv.first);
        if (!sym_page) {
            LOG(WARNING) << "Invalid symbol addr, no page: 0x" << std::hex << kv.first;
            continue;
        }
        if (sym_page->empty_page) {
            VLOG(VLOG_CFG) << "Skipping symbol due to empty page: 0x" << std::hex << kv.first;
            continue;
        }
        Block sym_block(kv.first);
        sym_block.mode = mode;
        if (m_arch == cs_arch::CS_ARCH_ARM) {
            // Thumb flag comes from the symbol table here, not the address bit.
            if (sym.is_thumb) {
                sym_block.mode = cs_mode::CS_MODE_THUMB;
            }
        }
        VLOG(VLOG_CFG) << "Pushing sym: 0x" << std::hex << sym_block.start;
        m_func_queue.push(sym_block);
    }
    // First process all normal functions, then process found functions.
    // This helps reduce CFG false positives (for example CFI on windows includes both)
    // functions and jump targets which can cause split functions.
    this->process_func_queue();
    // Queue all extra found functions (eg from CFG or eh_frame)
    for (const auto &kv : m_resolver->get_found_funcs()) {
        if(!m_memmap->is_text_sec(kv.first)) {
            VLOG(VLOG_CFG) << "Skipping found function 0x" << std::hex << kv.first << " because its not a TEXT section";
            continue;
        }
        auto *found_page = m_memmap->addr_to_page(kv.first);
        if (!found_page) {
            LOG(WARNING) << "Skipping invalid addr, no page: 0x" << std::hex << kv.first;
            continue;
        }
        if (found_page->empty_page) {
            VLOG(VLOG_CFG) << "Skipping found function, empty page: 0x" << std::hex << kv.first;
            continue;
        }
        // Already present in the CFG from the first pass; skip.
        if (m_blocks.count(make_range(kv.first))) {
            continue;
        }
        Block found_block(kv.first);
        found_block.mode = mode;
        if (m_arch == cs_arch::CS_ARCH_ARM) {
            if (found_block.start % 2 != 0) {
                found_block.mode = cs_mode::CS_MODE_THUMB;
                found_block.start -= 1;
            }
        }
        // Tag so process_block() can refuse splits caused by found blocks.
        found_block.metadata.emplace(bb_metadata::FOUND_BLOCK, MetaData());
        VLOG(VLOG_CFG) << "Pushing found: 0x" << std::hex << found_block.start;
        m_func_queue.push(found_block);
    }
    // Process any newly added functions in the found queue.
    this->process_func_queue();
    m_state->clear_vstack();
    if (FLAGS_printholes) {
        this->print_holes();
    }
    if (FLAGS_lin_sweep) {
        this->do_linear_scan();
    }
    this->post_process_blocks();
    // Manually clean up our bitmap;
    this->m_bitmap.reset();
    CHECK(m_block_queue.empty() && m_func_queue.empty()) << "Failed to exhaust all blocks and functions";
    return 0;
}
// Seeds m_exit_funcs with the addresses of known no-return functions so the
// CFG builder can suppress fallthrough edges after calls to them.
// Rewritten to use static lookup sets instead of ten chained string
// comparisons per symbol; the set of matched names is unchanged.
void Cfg::populate_func_tables() {
    // Raw (mangled or C) symbol names that never return.
    static const std::set<std::string> kNoReturnNames = {
        "__stack_chk_fail",
        "___stack_chk_fail",
        "__dl___stack_chk_fail",
        "exit",
        "_exit",
        "__cxa_bad_cast",
        "__cxa_bad_typeid",
        "__assert_fail",
        "abort",
        "__android_log_assert",
    };
    // Demangled C++ names that never return (exact matches).
    static const std::set<std::string> kNoReturnDemangled = {
        "std::terminate()",
        "__gnu_cxx::__throw_concurrence_lock_error()",
    };
    for (const auto &kv : m_resolver->get_syms_by_addr()) {
        if (kNoReturnNames.count(kv.second.name)) {
            m_exit_funcs.emplace(kv.first);
        }
        int status = 0;
        char *ret = abi::__cxa_demangle(kv.second.name.c_str(), 0, 0, &status);
        if (status != 0) {
            // Per the Itanium ABI, __cxa_demangle returns nullptr on failure,
            // so there is nothing to free here.
            continue;
        }
        std::string demangled_sym(ret);
        free(ret);
        if (kNoReturnDemangled.count(demangled_sym)) {
            m_exit_funcs.emplace(kv.first);
        }
        // Substring matches cover all overloads/instantiations of the
        // libstdc++ throw helpers.
        if (demangled_sym.find("__throw_out_of_range_fmt") != std::string::npos) {
            m_exit_funcs.emplace(kv.first);
        }
        if (demangled_sym.find("std::__throw_length_error") != std::string::npos) {
            m_exit_funcs.emplace(kv.first);
        }
    }
}
// Drains the function work queue: for each queued function head, walks all
// of its basic blocks, then exhausts any switch tables discovered while
// walking that function. Always returns 0 (failures are fatal).
int Cfg::process_func_queue() {
    while (!m_func_queue.empty()) {
        Block func_block = m_func_queue.pop();
        m_state->at_func_start(&func_block);
        VLOG(VLOG_CFG) << "Starting function at: 0x" << std::hex << func_block.start;
        if (this->process_block_queue(func_block, func_block.start)) {
            LOG(FATAL) << "Failed to process block: 0x" << std::hex << func_block.start;
        }
        // Exhaust all switches found in this function
        while (!m_state->get_switch_tables()->empty()) {
            this->process_switches();
        }
        m_state->at_func_end();
    }
    return 0;
}
// Queues `func_block` — marked as a function head when `func_addr` is
// non-zero — and then drains the block work queue.
int Cfg::process_block_queue(Block func_block, uint64_t func_addr) {
    if (func_addr != 0) {
        func_block.func_addr = func_addr;
        func_block.is_func_head = true;
    }
    m_block_queue.push(func_block);
    return process_block_queue();
}
// Drains the basic-block work queue. Each popped block is disassembled via
// process_block(); blocks that decode cleanly are committed to m_blocks,
// have their pending fixups applied, and are marked in the coverage bitmap.
// Always returns 0.
int Cfg::process_block_queue() {
    while (!m_block_queue.empty()) {
        Block cur_block = m_block_queue.pop();
        m_state->at_block_start(&cur_block);
        if (this->process_block(&cur_block)) {
            // Invalid, duplicate, or split-causing block: nothing to commit.
            continue;
        }
        m_state->at_block_end(&cur_block);
        this->process_fixups(&cur_block);
        // Key is the inclusive address range [start, end-1].
        auto ret_pair = m_blocks.emplace(make_range(cur_block.start, cur_block.end - 1), cur_block);
        if (!ret_pair.second) {
            LOG(ERROR) << "Failed to emplace new block: 0x" << std::hex << cur_block.start;
            continue;
        }
        // LOG(INFO) << "block bit range: 0x" << std::hex << cur_block.start << " size: 0x" << (cur_block.end - cur_block.start);
        m_bitmap->set_bit_range(cur_block.start, (cur_block.end - cur_block.start), MapFlag::BLOCK);
    }
    return 0;
}
// Disassembles one basic block starting at block->start. The decode loop
// stops at the first branch (honoring a single delay slot on ISAs that have
// one), when it runs into an already-decoded block, or after MAX_BLOCK_SIZE
// bytes. The terminating branch then decides which successors (branch target
// and/or fallthrough) are queued.
// Returns 0 when the block is valid and handled, 1 when it is invalid, a
// duplicate, or caused a split of an existing block.
//
// BUGFIX: `insn` was previously freed with cs_free() *before* the branch
// handling below passed it to run_events(SYM_BRANCH, ..., insn, sym) — a
// use-after-free. The free is now deferred to the end of the function; the
// early-return paths still free it themselves.
int Cfg::process_block(Block *block) {
    const uint8_t *data_ptr = m_memmap->addr_to_ptr(block->start);
    if (!data_ptr) {
        VLOG(VLOG_CFG) << "Failed to find memory for block: 0x" << std::hex << block->start;
        return 1;
    }
    // Check if we walked into a block that was in the queue at the time of branch checks.
    // Ensure it was not already processed; check for the split, then fix up the old
    // block and continue on with this new block. This duplicates a little work.
    auto start_it = m_blocks.find(make_range(block->start));
    if (start_it != m_blocks.end()) {
        if (start_it->first.first != block->start) {
            bool call_target = block->start == block->func_addr;
            // Skip found blocks that split our existing CFG's blocks
            if (call_target && block->metadata.count(bb_metadata::FOUND_BLOCK)) {
                return 1;
            }
            this->split_block(block->start, &start_it->second, call_target);
            return 1;
        }
        else if (start_it->first.first == block->start) {
            VLOG(VLOG_CFG) << "Attempted to double parse block: 0x" << std::hex << block->start;
            return 1;
        }
    }
    block->data = data_ptr;
    VLOG(VLOG_CFG) << "Processing block: 0x" << std::hex << block->start;
    Branch target;
    std::vector<Branch> guesses;
    uint64_t block_start = block->start;
    uint64_t max_block_size = MAX_BLOCK_SIZE;
    cs_insn *insn = cs_malloc(m_cs_handle);
    cs_option(m_cs_handle, CS_OPT_MODE, block->mode);
    cs_option(m_cs_handle, CS_OPT_DETAIL, CS_OPT_ON);
    bool got_delay = false;
    while(cs_disasm_iter(m_cs_handle, &data_ptr, &max_block_size, &block_start, insn)) {
        VLOG(VLOG_CFG) << "0x" << std::hex << insn->address << ": " << insn->mnemonic << " " << insn->op_str;
        this->find_guesses(insn, block, &guesses);
        m_state->at_insn(insn, block);
        m_events->run_events(event_type::INSN, m_state, block, insn);
        // If we hit a block we already knew about fall through and mark it a follower.
        uint64_t next_insn_addr = insn->address + insn->size;
        if (m_bitmap->get_bit(next_insn_addr, MapFlag::BLOCK)) {
            break;
        }
        // The delay-slot instruction was just consumed; the block ends here.
        if (got_delay) {
            break;
        }
        if (!this->is_branch(insn)) {
            continue;
        }
        // On delay-slot ISAs record the branch now and decode one more insn.
        if (m_state->has_delay_slot()) {
            this->find_branch_targets(insn, block, &target);
            got_delay = true;
            continue;
        }
        break;
    }
    // Invalid block, we did not disassemble any instructions.
    if (block->data == data_ptr) {
        cs_free(insn, 1);
        return 1;
    }
    block->end = insn->address + insn->size;
    // Check that this block is not a failure, bail if it is
    if (m_blocks.count(make_range(block->start, block->end - 1))) {
        cs_free(insn, 1);
        return 1;
    }
    if (!m_state->has_delay_slot()) {
        this->find_branch_targets(insn, block, &target);
    }
    this->process_guesses(&guesses, block);
    // Check the Branch's target; determine if a fallthrough should be queued and if the branch target should.
    bool queue_branch = false;
    bool queue_fallthrough = false;
    do {
        switch (target.type) {
            case branch_type::GUESS:
                queue_branch = false;
                queue_fallthrough = false;
                break;
            case branch_type::CONDITIONAL:
                queue_branch = true;
                queue_fallthrough = true;
                break;
            case branch_type::UNCONDITIONAL:
                queue_branch = true;
                queue_fallthrough = false;
                break;
            case branch_type::CALL:
                queue_branch = true;
                queue_fallthrough = true;
                break;
            case branch_type::FALLTHRU:
                LOG(FATAL) << "Fallthrough should never be generated by process_guesses!";
                break;
        };
        if (!target.addr) {
            break;
        }
        if (queue_branch) {
            block->branch_target = target.addr;
            auto it = m_blocks.find(make_range(target.addr));
            if (it != m_blocks.end()) {
                if (it->first.first != target.addr) {
                    // Target lands inside an existing block: split it.
                    bool call_target = target.type == branch_type::CALL;
                    this->split_block(target.addr, &it->second, call_target);
                }
                else {
                    VLOG(VLOG_CFG) << "Skipping duplicate target 0x" << std::hex << target.addr;
                    if (target.type == branch_type::CALL) {
                        it->second.callers.emplace_back(block->start);
                    }
                    else if (target.type == branch_type::CONDITIONAL || target.type == branch_type::UNCONDITIONAL) {
                        it->second.leaders.emplace_back(block->start);
                    }
                    block->followers.emplace_back(target.addr);
                    if (this->is_no_exit_sym(target.addr)) {
                        queue_fallthrough = false;
                    }
                }
                break;
            }
            const Symbol *sym;
            if (m_resolver->resolve_sym(target.addr, &sym) && sym->type == sym_type::IMPORT) {
                VLOG(VLOG_CFG) << "Skipping target: 0x" << std::hex << target.addr << " : " << sym->name;
                m_events->run_events(event_type::SYM_BRANCH, m_state, block, insn, sym);
                if (this->is_no_exit_sym(target.addr)) {
                    VLOG(VLOG_CFG) << "Skipping fallthrough on symbol that does not return";
                    queue_fallthrough = false;
                }
                // Extra checks for special functions
                if (sym->name == "__libc_start_main") {
                    auto main_ptr_res = this->libc_start_main_helper(insn, block);
                    if (main_ptr_res) {
                        VLOG(VLOG_CFG) << "Found main ptr: 0x" << std::hex << *main_ptr_res;
                        auto main_branch = Branch(*main_ptr_res, branch_type::CALL, guess_type::LIBC_START);
                        main_branch.mode = block->mode;
                        this->add_block(main_branch, block);
                    }
                }
                break;
            }
            if(!m_memmap->is_text_sec(target.addr)) {
                VLOG(VLOG_CFG) << "Skipping target 0x" << std::hex << target.addr << " because its not a TEXT section";
                break;
            }
            this->add_block(target, block);
        }
    } while (false);
    do {
        if (queue_fallthrough) {
            uint64_t fallthru_start = block->end;
            auto it = m_blocks.find(make_range(fallthru_start));
            if (it != m_blocks.end()) {
                VLOG(VLOG_CFG) << "Skipping duplicate target 0x" << std::hex << fallthru_start;
                break;
            }
            block->followers.emplace_back(fallthru_start);
            auto fallbranch = Branch(fallthru_start, branch_type::FALLTHRU, guess_type::UNKNOWN);
            fallbranch.mode = block->mode;
            this->add_block(fallbranch, block);
        }
    } while (false);
    // Freed here, after the SYM_BRANCH event above is done with insn.
    cs_free(insn, 1);
    return 0;
}
// Splits an existing block `cur_block` at `split_addr`.
// When call_target is false: block_1 keeps [start, split_addr) and falls
// through into block_2 = [split_addr, original end). When call_target is
// true: split_addr becomes a new function head, the old block's head is
// discarded and its coverage bits are cleared.
// Followers/leaders, metadata, recursive func_addr ownership, and switch
// tables anchored on the old block are re-linked to the new layout.
void Cfg::split_block(uint64_t split_addr, Block *cur_block, bool call_target) {
    // Split the block into two new blocks
    VLOG(VLOG_CFG) << "Splitting block: 0x" << std::hex << cur_block->start << " : 0x" << cur_block->end << " at addr: 0x" << std::hex << split_addr;
    // NOTE(review): cur_block is already dereferenced by the VLOG above, so
    // this CHECK fires too late to guard a null pointer — confirm callers
    // never pass null.
    CHECK(cur_block) << "Invalid block pointer passed to split_block()";
    Block block_1 = std::move(*cur_block);
    m_blocks.erase(make_range(block_1.start));
    uint64_t original_end = block_1.end;
    std::vector<uint64_t> copy_followers;
    uint64_t orig_btarget = block_1.branch_target;
    // The original followers belong to the tail (block_2) after the split.
    if (block_1.followers.size()) {
        copy_followers = block_1.followers;
        block_1.followers.clear();
    }
    // If we are splitting a call target, discard the prior head of the block
    if (!call_target) {
        block_1.end = split_addr;
        block_1.followers.emplace_back(split_addr);
        auto ret_pair = m_blocks.emplace(make_range(block_1.start, block_1.end - 1), block_1);
        if (!ret_pair.second) {
            LOG(ERROR) << "Failed to emplace 1st splitted block: 0x" << std::hex << block_1.start;
        }
    }
    else {
        // Head is dropped entirely; clear its coverage so a later sweep can
        // revisit those bytes.
        m_bitmap->clear_bit_range(block_1.start, (split_addr - block_1.start));
    }
    // Make the second block:
    auto block_2 = Block(split_addr);
    block_2.mode = block_1.mode;
    block_2.end = original_end;
    block_2.func_addr = block_1.func_addr; // if call target, this will be swapped in the lambda
    if (call_target) {
        block_2.is_func_head = true;
    }
    else {
        block_2.func_addr = block_1.func_addr;
        block_2.leaders.emplace_back(block_1.start);
    }
    CHECK( (split_addr - block_1.start) > 0 ) << "Invalid block being split, block_1.end less than split addr, end: 0x"
        << std::hex << block_1.end << " split_addr: 0x" << split_addr;
    block_2.data = block_1.data + (split_addr - block_1.start);
    block_2.branch_target = orig_btarget;
    block_2.followers = copy_followers;
    // Partition per-address metadata between the two halves.
    auto block_1_meta = block_1.metadata;
    block_1.metadata.clear();
    for (const auto &meta : block_1_meta) {
        if (meta.second.addr >= split_addr) {
            block_2.metadata.emplace(meta);
        }
        else {
            block_1.metadata.emplace(meta);
        }
    }
    // Fix up the followers, leader addresses
    for (const auto &follower : block_2.followers) {
        auto follow_it = m_blocks.find(make_range(follower));
        if (follow_it == m_blocks.end()) {
            // Follower not committed yet; defer the leader rewrite.
            this->add_fixup({follower, block_1.start, block_2.start, fixup_type::LEADER_REPLACE});
            continue;
        }
        follow_it->second.leaders.erase(std::remove(follow_it->second.leaders.begin(), follow_it->second.leaders.end(), block_1.start), follow_it->second.leaders.end());
        follow_it->second.leaders.emplace_back(block_2.start);
    }
    auto ret_pair_2 = m_blocks.emplace(make_range(block_2.start, block_2.end - 1), block_2);
    if (!ret_pair_2.second) {
        LOG(ERROR) << "Failed to emplace 2nd splitted block: 0x" << std::hex << block_2.start;
    }
    else {
        if (call_target) {
            ret_pair_2.first->second.func_addr = block_2.start;
        }
    }
    // Since we are are creating a new function out of a split, we need to mark all
    // the existing blocks in the function as under this new function.
    if (call_target) {
        std::function<void(Block *, uint64_t, uint64_t)> change_func;
        // Use a recusing lambda to quickly mark all the blocks in the function
        change_func = [this, &change_func](Block *block, uint64_t func_addr, uint64_t old_addr)->void {
            if (block->func_addr != old_addr) {
                return;
            }
            block->func_addr = func_addr;
            VLOG(VLOG_CFG) << "Recursive change func_addr: 0x" << std::hex << old_addr << " -> 0x" << func_addr << " block: 0x" << block->start;
            for (const auto &follower : block->followers) {
                auto it = m_blocks.find(make_range(follower));
                if (it != m_blocks.end()) {
                    change_func(&it->second, func_addr, old_addr);
                }
            }
        };
        change_func(&block_2, block_2.start, block_1.func_addr);
    }
    // Fix up any switch tables linked to the split block
    auto switch_tables = m_state->get_switch_tables();
    CHECK(switch_tables) << "Invalid switch tables pointer";
    for (auto &kv : *switch_tables) {
        if (kv.second != block_1.start) {
            continue;
        }
        VLOG(VLOG_CFG) << "Replaced split block switch table entry: 0x" << std::hex << kv.first;
        kv.second = block_2.start;
        break;
    }
}
// Creates a new Block for `target` and queues it: CALL targets go to the
// function queue (taking priority over any pending copy in the block queue);
// conditional/unconditional branch targets and fallthroughs go to the block
// queue. Caller/leader/follower linkage is wired up directly, or deferred
// via fixups when the target already sits in a queue.
// Fixes: the "already in block queue" log printed the *current* block's
// address instead of the deleted one's, and the duplicate-push log on the
// COND/UNCOND path wrongly said "fallthru".
void Cfg::add_block(Cfg::Branch target, Block *cur_block) {
    Block new_block(target.addr);
    new_block.mode = target.mode;
    if (target.type == branch_type::CALL) {
        VLOG(VLOG_CFG) << "Pushing CALL into func queue: 0x" << std::hex << target.addr;
        new_block.callers.emplace_back(cur_block->start);
        // Check if we already have this new call block in the block queue and remove it.
        // functions should take priority. TODO: this will break the linkages for the created block.
        if (m_block_queue.in_queue(new_block)) {
            m_block_queue.del_elm(new_block);
            VLOG(VLOG_CFG) << "Call branch 0x" << std::hex << new_block.start << " already in block queue, deleted from block queue";
        }
        if (!m_func_queue.push(new_block)) {
            VLOG(VLOG_CFG) << "Tried to push duplicate CALL block: 0x" << std::hex << target.addr;
            this->add_fixup({target.addr, cur_block->start, fixup_type::CALLER});
        }
    }
    else if (target.type == branch_type::CONDITIONAL || target.type == branch_type::UNCONDITIONAL) {
        new_block.func_addr = cur_block->func_addr;
        new_block.leaders.emplace_back(cur_block->start);
        if (m_func_queue.in_queue(new_block)) {
            VLOG(VLOG_CFG) << "Unconditional branch 0x" << std::hex << new_block.start << " already in function queue";
            return;
        }
        VLOG(VLOG_CFG) << "Pushing COND/UNCOND branch: 0x" << std::hex << target.addr;
        if (!m_block_queue.push(new_block)) {
            VLOG(VLOG_CFG) << "Tried to push duplicate branch block: 0x" << std::hex << new_block.start;
            this->add_fixup({target.addr, cur_block->start, fixup_type::LEADER});
        }
        cur_block->followers.emplace_back(new_block.start);
    }
    else if (target.type == branch_type::FALLTHRU) {
        new_block.func_addr = cur_block->func_addr;
        new_block.leaders.emplace_back(cur_block->start);
        if (m_func_queue.in_queue(new_block)) {
            VLOG(VLOG_CFG) << "Fallthru branch 0x" << std::hex << new_block.start << " already in function queue";
            return;
        }
        VLOG(VLOG_CFG) << "Pushing fallthru: 0x" << std::hex << new_block.start;
        if (!m_block_queue.push(new_block)) {
            VLOG(VLOG_CFG) << "Tried to push duplicate fallthru block: 0x" << std::hex << new_block.start;
            this->add_fixup({new_block.start, cur_block->start, fixup_type::LEADER});
        }
    }
    else {
        LOG(FATAL) << "Invalid Branch type: " << static_cast<int>(target.type);
    }
}
// Records a deferred linkage fixup keyed by the address of the block it
// targets; applied later by process_fixups() once that block is committed.
// Simplified: std::map::operator[] default-constructs the vector on first
// access, making the explicit find/insert branch redundant.
void Cfg::add_fixup(Fixup fixup) {
    m_fixups[fixup.block].emplace_back(fixup);
}
// Applies (and then discards) any deferred linkage fixups recorded for
// `block` by add_fixup(): appends callers/leaders/followers, or rewrites a
// leader address after a block split (LEADER_REPLACE).
void Cfg::process_fixups(Block *block) {
    CHECK(block) << "Invalid block passed to process_fixups";
    auto fix_it = m_fixups.find(block->start);
    if (fix_it == m_fixups.end()) {
        // No pending fixups for this block.
        return;
    }
    for (const auto &fixup : fix_it->second) {
        switch (fixup.type) {
            case fixup_type::CALLER:
                block->callers.emplace_back(fixup.value);
                VLOG(VLOG_CFG) << "Fixing up caller on block: 0x" << std::hex << block->start << " val: 0x" << fixup.value;
                break;
            case fixup_type::LEADER:
                block->leaders.emplace_back(fixup.value);
                VLOG(VLOG_CFG) << "Fixing up leader on block: 0x" << std::hex << block->start << " val: 0x" << fixup.value;
                break;
            case fixup_type::LEADER_REPLACE:
                // Swap the old (pre-split) leader address for the new one.
                std::replace(block->leaders.begin(), block->leaders.end(), fixup.value, fixup.value_2);
                break;
            case fixup_type::FOLLOWER:
                block->followers.emplace_back(fixup.value);
                VLOG(VLOG_CFG) << "Fixing up follower on block: 0x" << std::hex << block->start << " val: 0x" << fixup.value;
                break;
            default:
                LOG(WARNING) << "Unsupported fixup type: " << static_cast<uint32_t>(fixup.type);
                break;
        }
    }
    m_fixups.erase(fix_it);
}
// Scans a single instruction for values that look like code addresses
// ("guesses"): x86 LEA/PUSH/MOV source operands and ARM/ARM64/MIPS literal
// loads. Candidates are appended to `targets` as GUESS branches and filtered
// later by process_guesses().
// Cleanup: removed four unused destination-operand struct copies (x86 MOV,
// ARM LDR, ARM64 LDR, MIPS LW) and added the missing break in the ARM64 case.
void Cfg::find_guesses(cs_insn *insn, Block *block, std::vector<Branch> *targets) {
    guess_type g_type = guess_type::UNKNOWN;
    uint64_t target = 0;
    switch (m_arch) {
        case cs_arch::CS_ARCH_X86: {
            cs_x86 x86 = insn->detail->x86;
            switch (insn->id) {
                case X86_INS_LEA: {
                    if (x86.op_count < 2) {
                        LOG(FATAL) << "Invalid x86 LEA at: 0x" << std::hex << insn->address;
                    }
                    cs_x86_op op1 = x86.operands[1];
                    // LEA computes an address; resolve it rather than a value.
                    auto op1_res = m_state->get_op_read_addr(insn, op1, block->mode);
                    if (!op1_res) {
                        break;
                    }
                    target = *op1_res;
                    g_type = guess_type::X86_LEA;
                    break;
                }
                case X86_INS_PUSH: {
                    if (x86.op_count < 1) {
                        LOG(FATAL) << "Invalid x86 PUSH at: 0x" << std::hex << insn->address;
                    }
                    cs_x86_op op0 = x86.operands[0];
                    auto op0_res = m_state->get_op_val(insn, op0, block->mode);
                    if (!op0_res) {
                        break;
                    }
                    target = *op0_res;
                    g_type = guess_type::X86_PUSH;
                    break;
                }
                case X86_INS_MOV: {
                    if (x86.op_count < 2) {
                        LOG(FATAL) << "Invalid x86 MOV at: 0x" << std::hex << insn->address;
                    }
                    // Only the source operand matters here.
                    cs_x86_op op1 = x86.operands[1];
                    auto op1_res = m_state->get_op_val(insn, op1, block->mode);
                    if (!op1_res) {
                        break;
                    }
                    target = *op1_res;
                    g_type = guess_type::X86_MOV;
                    break;
                }
            }
            break;
        }
        case cs_arch::CS_ARCH_ARM: {
            cs_arm arm = insn->detail->arm;
            switch (insn->id) {
                case ARM_INS_LDR: {
                    if (arm.op_count < 2) {
                        LOG(FATAL) << "Invalid arm LDR instruction at: 0x" << std::hex << insn->address;
                    }
                    cs_arm_op op1 = arm.operands[1];
                    auto op1_res = m_state->get_op_val(insn, op1, block->mode);
                    if (!op1_res) {
                        break;
                    }
                    int32_t op1_val = *op1_res;
                    // Check for relative loads and write off those to the bitmap.
                    if (op1.type == ARM_OP_MEM && op1.mem.base == ARM_REG_PC) {
                        uint32_t read_addr = m_state->get_arm_pc_val(insn, op1, block->mode);
                        if (read_addr) {
                            if (block->mode == cs_mode::CS_MODE_THUMB && read_addr % 2 != 0) {
                                // Possible thumb function address, skip it.
                            }
                            else {
                                m_bitmap->set_bit_range(read_addr, 4, MapFlag::READ);
                            }
                        }
                    }
                    if (op1_val > 0 && !m_state->is_op_stack_based(op1)) {
                        target = op1_val;
                        g_type = guess_type::ARM_LDR;
                    }
                    break;
                }
            }
            break;
        }
        case cs_arch::CS_ARCH_ARM64: {
            cs_arm64 arm = insn->detail->arm64;
            switch (insn->id) {
                case ARM64_INS_LDR: {
                    if (arm.op_count < 2) {
                        LOG(FATAL) << "Invalid arm64 LDR at: 0x" << std::hex << insn->address;
                    }
                    cs_arm64_op op1 = arm.operands[1];
                    auto op1_res = m_state->get_op_val(insn, op1);
                    if (!op1_res) {
                        break;
                    }
                    if (*op1_res > 0 && !m_state->is_op_stack_based(op1)) {
                        target = *op1_res;
                        g_type = guess_type::ARM_LDR;
                    }
                    break;
                }
            }
            break;
        }
        case cs_arch::CS_ARCH_MIPS: {
            cs_mips mips = insn->detail->mips;
            switch (insn->id) {
                case MIPS_INS_LW: {
                    if (mips.op_count < 2) {
                        LOG(FATAL) << "Invalid op count for MIPS lw instruction at: 0x" << std::hex << insn->address;
                    }
                    cs_mips_op op1 = mips.operands[1];
                    auto op1_res = m_state->get_op_val(insn, op1, block);
                    if (!op1_res) {
                        break;
                    }
                    if (*op1_res) {
                        target = *op1_res;
                        g_type = guess_type::MIPS_LW;
                    }
                    break;
                }
            }
            break;
        }
        case cs_arch::CS_ARCH_PPC: {
            // TODO: PPC guesses
            break;
        }
        default:
            LOG(FATAL) << "Invalid CPU arch: " << m_arch;
            break;
    }
    if (target && g_type != guess_type::UNKNOWN) {
        VLOG(VLOG_REG) << "--target: 0x" << std::hex << target;
        targets->emplace_back(target, branch_type::GUESS, g_type);
    }
}
// Filters the guessed code addresses collected by find_guesses() and queues
// the survivors as candidate functions. A guess is only allowed when its
// shape matches the binary format's calling idiom (ELF entry-point argument
// loads, COFF/Mach-O 32-bit pushes); it must also land in a text section,
// not split the current block, not already exist, not be an import, and not
// be a switch vtable load. Always returns 0.
int Cfg::process_guesses(std::vector<Cfg::Branch> *guesses, Block *block) {
    // Check each found addr in the block body for being a possible function entry and push it.
    for (Branch &guess : *guesses) {
        // Check if the current block is the ep func, and we are ELF
        // Because most elf binaries start by calling a libc start function with the main as a arg
        // either LEA'd or MOV'd into a register in the block.
        bool allow_guess = false;
        if (guess.g_type == guess_type::X86_PUSH) {
            if (block->mode == cs_mode::CS_MODE_32) {
                allow_guess = true;
            }
        }
        if (m_obj->isELF()) {
            if (m_ep == block->func_addr) {
                switch (guess.g_type) {
                    case guess_type::X86_MOV:
                    case guess_type::X86_LEA:
                    case guess_type::ARM_LDR:
                    case guess_type::MIPS_LW:
                        allow_guess = true;
                        break;
                    default:
                        break;
                }
            }
        }
        else if (m_obj->isCOFF() || m_obj->isMachO()) {
            switch (guess.g_type) {
                case guess_type::X86_PUSH:
                    allow_guess = true;
                    break;
                default:
                    break;
            }
        }
        guess.mode = block->mode;
        // Fix up guess block addresses
        if (m_arch == cs_arch::CS_ARCH_ARM) {
            // Odd ARM address encodes Thumb mode; strip the low bit.
            if (guess.addr % 2 != 0) {
                guess.addr -= 1;
                guess.mode = cs_mode::CS_MODE_THUMB;
            }
        }
        // Skip this guess
        if (!allow_guess) {
            continue;
        }
        // Avoid splitting our own block
        if (guess.addr >= block->start && guess.addr < block->end) {
            continue;
        }
        // Now that we found a valid guess, try and queue it.
        auto it = m_blocks.find(make_range(guess.addr));
        if (it != m_blocks.end()) {
            VLOG(VLOG_CFG) << "Skipping duplicate GUESS 0x" << std::hex << guess.addr;
            continue;
        }
        if(!m_memmap->is_text_sec(guess.addr)) {
            VLOG(VLOG_CFG) << "Skipping GUESS 0x" << std::hex << guess.addr << " because its not a TEXT section";
            continue;
        }
        const Symbol *sym;
        if (m_resolver->resolve_sym(guess.addr, &sym) && sym->type == sym_type::IMPORT) {
            VLOG(VLOG_CFG) << "Skipping GUESS: 0x" << std::hex << guess.addr << " : " << sym->name;
            continue;
        }
        if (block->metadata.count(bb_metadata::SWITCH_INDIRECT)) {
            // A LOAD of the same address inside an indirect-switch block is
            // the jump-table base, not a function pointer.
            auto lea_meta = block->metadata.find(bb_metadata::LOAD);
            if (lea_meta != block->metadata.end()) {
                if (lea_meta->second.value == guess.addr) {
                    VLOG(VLOG_CFG) << "Skipping GUESS: 0x" << std::hex << guess.addr << " because it is a switch vtable load";
                    continue;
                }
            }
        }
        VLOG(VLOG_CFG) << "Pushing GUESS into func queue: 0x" << std::hex << guess.addr;
        Block guess_block(guess.addr);
        guess_block.mode = guess.mode;
        if (!m_func_queue.push(guess_block)) {
            VLOG(VLOG_CFG) << "Tried to push duplicate GUESS block: 0x" << std::hex << guess.addr;
        }
    }
    return 0;
}
int Cfg::find_branch_targets(cs_insn *insn, Block *block, Branch *b_target) {
cs_detail *detail = insn->detail;
branch_type b_type = branch_type::UNCONDITIONAL;
bool find_op = false;
uint8_t op_idx = 0;
uint64_t target = 0;
cs_mode new_mode;
bool switch_mode = false;
if (m_arch == cs_arch::CS_ARCH_X86) {
cs_x86 x86 = insn->detail->x86;
new_mode = block->mode;
switch (insn->id) {
case X86_INS_CALL:
b_type = branch_type::CALL;
find_op = true;
op_idx = 0;
break;
case X86_INS_SYSENTER:
case X86_INS_SYSCALL:
case X86_INS_INT:
b_type = branch_type::CALL;
break;
case X86_INS_LOOP:
case X86_INS_LOOPE:
case X86_INS_LOOPNE:
find_op = true;
op_idx = 0;
b_type = branch_type::CONDITIONAL;
}
if (!find_op) {
for (uint8_t i = 0; i < detail->groups_count; i++) {
uint8_t grp = detail->groups[i];
if (grp == X86_GRP_JUMP) {
// Mark static jumps as targets.
// All dependent jumps should be fallthroughs
if (insn->id == X86_INS_JMP) {
b_type = branch_type::UNCONDITIONAL;
}
else {
b_type = branch_type::CONDITIONAL;
}
find_op = true;
break;
}
}
}
if (find_op) {
if (op_idx >= x86.op_count) {
LOG(FATAL) << "BAD instruction hit at: 0x" << std::hex << insn->address;
}
CfgRes<uint64_t> oper_res;
do {
cs_x86_op op = x86.operands[op_idx];
// Special case for x86 because we need to symbolize the call target address, not the value
if ( (insn->id == X86_INS_CALL || insn->id == X86_INS_JMP) && op.type == X86_OP_MEM) {
oper_res = m_state->get_op_read_addr(insn, op, new_mode);
break;
}
oper_res = m_state->get_op_val(insn, x86.operands[op_idx], new_mode);
break;
} while(false);
if (oper_res) {
target = *oper_res;
}
}
}
else if (m_arch == cs_arch::CS_ARCH_ARM) {
cs_arm arm = insn->detail->arm;
bool check_cc = false;
for (uint8_t i = 0; i < detail->groups_count; i++) {
uint8_t grp = detail->groups[i];
if (grp == ARM_GRP_JUMP) {
check_cc = true;
find_op = true;
op_idx = 0;
break;
}
}
switch (insn->id) {
case ARM_INS_POP:
if (is_pc_in_arm_ops(arm)) {
check_cc = true;
}
break;
case ARM_INS_TBH:
case ARM_INS_TBB:
// ignore switch table instructions.
find_op = false;
break;
case ARM_INS_BX:
case ARM_INS_BLX:
// Check if we are on a switch branch and ignore the op.
if (arm.op_count < 1) {
LOG(FATAL) << "Invalid arm branch instruction at: 0x" << std::hex << insn->address;
}
if (arm.operands[0].type == ARM_OP_REG) {
if (block->metadata.count(bb_metadata::SWITCH_INDIRECT)) {
find_op = false;
}
}
switch_mode = true;
break;
case ARM_INS_ADD:
// WARN: Special case bypass here, because we cannot directly query the PC register we calculate the
// target value and then disable find_op to fall though.
if (arm.op_count == 3 && arm.operands[0].type == ARM_OP_REG && arm.operands[0].reg == ARM_REG_PC) {
find_op = false;
b_type = branch_type::UNCONDITIONAL;
auto op1_res = m_state->get_op_val(insn, arm.operands[1], block->mode);
auto op2_res = m_state->get_op_val(insn, arm.operands[2], block->mode);
if (op1_res && op2_res) {
target = *op1_res + *op2_res;
}
}
break;
}
if (check_cc) {
if (arm.cc == ARM_CC_AL || arm.cc == ARM_CC_INVALID) {
if (insn->id == ARM_INS_BL) {
b_type = branch_type::CALL;
}
else if (insn->id == ARM_INS_BLX) {
b_type = branch_type::CALL;
}
else if (insn->id == ARM_INS_CBZ || insn->id == ARM_INS_CBNZ) {
b_type = branch_type::CONDITIONAL;
op_idx = 1;
}
else {
b_type = branch_type::UNCONDITIONAL;
}
}
else {
b_type = branch_type::CONDITIONAL;
}
}
if (find_op) {
if (op_idx >= arm.op_count) {
LOG(FATAL) << "BAD instruction hit at: 0x" << std::hex << insn->address;
}
auto oper_res = m_state->get_op_val(insn, arm.operands[op_idx], block->mode);
if (oper_res) {
target = *oper_res;
}
}
}
else if (m_arch == cs_arch::CS_ARCH_ARM64) {
cs_arm64 arm = insn->detail->arm64;
bool check_cc = false;
for (uint8_t i = 0; i < detail->groups_count; i++) {
uint8_t grp = detail->groups[i];
if (grp == ARM64_GRP_JUMP) {
check_cc = true;
find_op = true;
op_idx = 0;
break;
}
}
switch (insn->id) {
case ARM64_INS_BL:
case ARM64_INS_BLR:
check_cc = true;
find_op = true;
op_idx = 0;
break;
case ARM64_INS_SVC:
b_type = branch_type::CALL;
break;
default:
break;
}
if (check_cc) {
if (arm.cc == ARM64_CC_AL || arm.cc == ARM64_CC_INVALID) {
switch (insn->id) {
case ARM64_INS_BL:
case ARM64_INS_BLR:
b_type = branch_type::CALL;
break;
case ARM64_INS_CBZ:
case ARM64_INS_CBNZ:
b_type = branch_type::CONDITIONAL;
op_idx = 1;
break;
case ARM64_INS_TBZ:
case ARM64_INS_TBNZ:
b_type = branch_type::CONDITIONAL;
op_idx = 2;
break;
case ARM64_INS_B:
// Check if this looks like a linking branch
if (block->metadata.count(bb_metadata::SAVE_LINK_REG)) {
b_type = branch_type::CALL;
}
break;
default:
b_type = branch_type::UNCONDITIONAL;
}
}
else {
b_type = branch_type::CONDITIONAL;
}
}
if (find_op) {
if (op_idx >= arm.op_count) {
LOG(FATAL) << "BAD instruction hit at: 0x" << std::hex << insn->address;
}
auto oper_res = m_state->get_op_val(insn, arm.operands[op_idx]);
if (oper_res) {
target = *oper_res;
}
}
}
else if (m_arch == cs_arch::CS_ARCH_MIPS) {
cs_mips mips = insn->detail->mips;
for (uint8_t i = 0; i < detail->groups_count; i++) {
uint8_t grp = detail->groups[i];
if (grp == MIPS_GRP_JUMP) {
find_op = true;
op_idx = 0;
break;
}
}
switch (insn->id) {
case MIPS_INS_BAL:
case MIPS_INS_JAL:
case MIPS_INS_JALR:
b_type = branch_type::CALL;
op_idx = 0;
find_op = true;
break;
case MIPS_INS_BLTZAL:
case MIPS_INS_BGEZAL:
b_type = branch_type::CALL;
op_idx = 1;
find_op = true;
break;
case MIPS_INS_SYSCALL:
b_type = branch_type::CALL;
find_op = false;
break;
case MIPS_INS_B:
case MIPS_INS_J:
case MIPS_INS_JR:
b_type = branch_type::UNCONDITIONAL;
op_idx = 0;
find_op = true;
break;
case MIPS_INS_BC1F:
case MIPS_INS_BC1T:
b_type = branch_type::CONDITIONAL;
op_idx = 0;
find_op = true;
break;
case MIPS_INS_BEQZ:
case MIPS_INS_BGEZ:
case MIPS_INS_BGTZ:
case MIPS_INS_BLEZ:
case MIPS_INS_BLTZ:
case MIPS_INS_BNEZ:
b_type = branch_type::CONDITIONAL;
op_idx = 1;
find_op = true;
break;
case MIPS_INS_BEQ:
case MIPS_INS_BNE:
b_type = branch_type::CONDITIONAL;
op_idx = 2;
find_op = true;
break;
}
if (find_op) {
if (op_idx >= mips.op_count) {
LOG(FATAL) << "BAD instruction hit at: 0x" << std::hex << insn->address;
}
auto oper_res = m_state->get_op_val(insn, mips.operands[op_idx], block);
if (oper_res) {
target = *oper_res;
}
}
}
else if (m_arch == cs_arch::CS_ARCH_PPC) {
cs_ppc ppc = insn->detail->ppc;
for (uint8_t i = 0; i < detail->groups_count; i++) {
uint8_t grp = detail->groups[i];
if (grp == PPC_GRP_JUMP) {
find_op = true;
op_idx = 0;
break;
}
}
switch (insn->id) {
// Conditional / unsupported / B(uncond)
case PPC_INS_B:
// Special case for BEQ because capstone does not have BEQ instructions
if (ppc.bc != ppc_bc::PPC_BC_INVALID) {
b_type = branch_type::CONDITIONAL;
if (ppc.op_count > 1) {
op_idx = 1;
} else {
op_idx = 0;
}
find_op = true;
}
else {
b_type = branch_type::UNCONDITIONAL;
op_idx = 0;
find_op = true;
}
break;
// Linking branches (calls)
case PPC_INS_BL:
case PPC_INS_BLA:
b_type = branch_type::CALL;
op_idx = 0;
find_op = true;
break;
case PPC_INS_BLR:
if (ppc.bc != ppc_bc::PPC_BC_INVALID) {
b_type = branch_type::CONDITIONAL;
find_op = false;
}
break;
case PPC_INS_BCL:
case PPC_INS_BCLA:
case PPC_INS_BCLR:
case PPC_INS_BCLRL:
b_type = branch_type::CALL;
op_idx = 2;
find_op = true;
break;
// Conditional jumps
case PPC_INS_BC:
b_type = branch_type::CONDITIONAL;
op_idx = 2;
find_op = true;
break;
case PPC_INS_BDZ:
case PPC_INS_BDZA:
case PPC_INS_BDZL:
case PPC_INS_BDZLA:
case PPC_INS_BDNZ:
case PPC_INS_BDNZA:
case PPC_INS_BDNZL:
case PPC_INS_BDNZLA:
b_type = branch_type::CONDITIONAL;
op_idx = 0;
find_op = true;
break;
// Conditional, no operand jumps
case PPC_INS_BDZLR:
case PPC_INS_BDZLRL:
case PPC_INS_BDNZLR:
case PPC_INS_BDNZLRL:
b_type = branch_type::CONDITIONAL;
find_op = false;
break;
// Special case indirect branches
// TODO: Look up the CTR register for a possible indirect target
case PPC_INS_BCCTR:
b_type = branch_type::CONDITIONAL;
find_op = false;
break;
case PPC_INS_BCTR:
b_type = branch_type::UNCONDITIONAL;
find_op = false;
break;
case PPC_INS_BCTRL:
b_type = branch_type::CALL;
find_op = false;
break;
}
if (find_op) {
CHECK(op_idx < ppc.op_count) << "BAD instruction hit at: 0x" << std::hex << insn->address;
auto oper_res = m_state->get_op_val(insn, ppc.operands[op_idx], block);
if (oper_res) {
target = *oper_res;
}
// Skip branch forward by 1 instruction
if (target == insn->address + 4) {
target = 0x0;
}
}
}
else {
LOG(FATAL) << "Unsupported arch: " << m_arch;
}
if (!target) {
b_target->addr = 0;
}
// Prevent splitting our own block.
if (target >= block->start && target < block->end) {
b_target->addr = 0;
}
// Block targets that are the current block.
else if (target == block->start) {
b_target->addr = 0;
}
else {
b_target->addr = target;
}
b_target->type = b_type;
b_target->mode = block->mode;
if (switch_mode) {
VLOG(VLOG_CFG) << "trying to switch modes, addr: 0x" << std::hex << b_target->addr;
if (block->mode == cs_mode::CS_MODE_ARM) {
if (b_target->addr % 2 != 0) {
b_target->addr -= 1;
}
b_target->mode = cs_mode::CS_MODE_THUMB;
}
else if (block->mode == cs_mode::CS_MODE_THUMB) {
if (b_target->addr % 2 != 0) {
b_target->addr -= 1;
b_target->mode = cs_mode::CS_MODE_THUMB;
}
else {
b_target->mode = cs_mode::CS_MODE_ARM;
}
}
}
return 0;
}
// Returns true when `insn` can terminate a basic block: a jump, call,
// return, trap/syscall, or an architecture-specific write to the program
// counter. Capstone group metadata is consulted first; instruction-id
// checks cover cases capstone does not group (or groups incompletely).
bool Cfg::is_branch(cs_insn *insn) const {
    cs_detail *detail = insn->detail;
    if (m_arch == cs_arch::CS_ARCH_X86) {
        if (detail->groups_count > 0) {
            for (uint8_t i = 0; i < detail->groups_count; i++) {
                uint8_t grp = detail->groups[i];
                switch (grp) {
                    case X86_GRP_JUMP:
                    case X86_GRP_INT:
                    case X86_GRP_IRET:
                    case X86_GRP_RET:
                    case X86_GRP_BRANCH_RELATIVE:
                        return true;
                    default:
                        continue;
                }
            }
        }
        // Instructions capstone does not place in a branch group but which
        // still end a block (calls, traps, loop instructions).
        switch (insn->id) {
            case X86_INS_CALL:
            case X86_INS_HLT:
            case X86_INS_SYSENTER:
            case X86_INS_SYSCALL:
            case X86_INS_LOOP:
            case X86_INS_LOOPE:
            case X86_INS_LOOPNE:
            case X86_INS_UD2:
                return true;
            default:
                break;
        }
    }
    else if (m_arch == cs_arch::CS_ARCH_ARM) {
        if (detail->groups_count > 0) {
            for (uint8_t i = 0; i < detail->groups_count; i++) {
                uint8_t grp = detail->groups[i];
                switch (grp) {
                    case ARM_GRP_JUMP:
                    case ARM_GRP_CALL:
                    case ARM_GRP_INT:
                    case ARM_GRP_BRANCH_RELATIVE:
                        return true;
                    default:
                        continue;
                }
            }
        }
        cs_arm arm = insn->detail->arm;
        switch (insn->id) {
            case ARM_INS_SVC:
                return true;
            // Multi-register loads are branches when PC is in the register list.
            case ARM_INS_LDMDB:
            case ARM_INS_POP:
            case ARM_INS_LDM:
                return is_pc_in_arm_ops(arm);
            case ARM_INS_LDR:
            case ARM_INS_MOV:
                if (arm.op_count > 0) {
                    // Check the operand type before reading `.reg`: the operand
                    // value is a union, so a non-register operand would alias a
                    // different member (consistent with the ADD case below).
                    if (arm.operands[0].type == ARM_OP_REG && arm.operands[0].reg == ARM_REG_PC) {
                        return true;
                    }
                }
                break;
            // add pc, ra, rb — computed branch via PC-destination arithmetic.
            case ARM_INS_ADD:
                if (arm.op_count == 3) {
                    if (arm.operands[0].type == ARM_OP_REG && arm.operands[0].reg == ARM_REG_PC) {
                        return true;
                    }
                }
                break;
            default:
                break;
        }
    }
    else if (m_arch == cs_arch::CS_ARCH_ARM64) {
        if (detail->groups_count > 0) {
            for (uint8_t i = 0; i < detail->groups_count; i++) {
                uint8_t grp = detail->groups[i];
                switch (grp) {
                    case ARM64_GRP_JUMP:
                    case ARM64_GRP_CALL:
                    case ARM64_GRP_INT:
                    case ARM64_GRP_BRANCH_RELATIVE:
                        return true;
                    default:
                        continue;
                }
            }
        }
        switch (insn->id) {
            case ARM64_INS_BLR:
            case ARM64_INS_BL:
            case ARM64_INS_SVC:
            case ARM64_INS_RET:
                return true;
        }
    }
    else if (m_arch == cs_arch::CS_ARCH_MIPS) {
        if (detail->groups_count > 0) {
            for (uint8_t i = 0; i < detail->groups_count; i++) {
                uint8_t grp = detail->groups[i];
                switch (grp) {
                    case MIPS_GRP_JUMP:
                    case MIPS_GRP_CALL:
                    case MIPS_GRP_INT:
                    case MIPS_GRP_BRANCH_RELATIVE:
                        return true;
                    default:
                        continue;
                }
            }
        }
        switch (insn->id) {
            case MIPS_INS_JALR:
            case MIPS_INS_JAL:
            case MIPS_INS_J:
            case MIPS_INS_B:
            case MIPS_INS_BAL:
            case MIPS_INS_SYSCALL:
                return true;
        }
    }
    else if (m_arch == cs_arch::CS_ARCH_PPC) {
        if (detail->groups_count > 0) {
            for (uint8_t i = 0; i < detail->groups_count; i++) {
                uint8_t grp = detail->groups[i];
                switch (grp) {
                    case PPC_GRP_JUMP:
                        return true;
                    default:
                        continue;
                }
            }
        }
        // PPC has a very large family of branch mnemonics that capstone does
        // not group; enumerate them explicitly.
        switch (insn->id) {
            case PPC_INS_SC:
            case PPC_INS_B:
            case PPC_INS_BA:
            case PPC_INS_BC:
            case PPC_INS_BCCTR:
            case PPC_INS_BCCTRL:
            case PPC_INS_BCL:
            case PPC_INS_BCLR:
            case PPC_INS_BCLRL:
            case PPC_INS_BCTR:
            case PPC_INS_BCTRL:
            case PPC_INS_BCT:
            case PPC_INS_BDNZ:
            case PPC_INS_BDNZA:
            case PPC_INS_BDNZL:
            case PPC_INS_BDNZLA:
            case PPC_INS_BDNZLR:
            case PPC_INS_BDNZLRL:
            case PPC_INS_BDZ:
            case PPC_INS_BDZA:
            case PPC_INS_BDZL:
            case PPC_INS_BDZLA:
            case PPC_INS_BDZLR:
            case PPC_INS_BDZLRL:
            case PPC_INS_BL:
            case PPC_INS_BLA:
            case PPC_INS_BLR:
            case PPC_INS_BLRL:
            case PPC_INS_BRINC:
            case PPC_INS_BCA:
            case PPC_INS_BCLA:
            case PPC_INS_BTA:
            case PPC_INS_BT:
            case PPC_INS_BF:
            case PPC_INS_BDNZT:
            case PPC_INS_BDNZF:
            case PPC_INS_BDZF:
            case PPC_INS_BDZT:
            case PPC_INS_BFA:
            case PPC_INS_BDNZTA:
            case PPC_INS_BDNZFA:
            case PPC_INS_BDZTA:
            case PPC_INS_BDZFA:
            case PPC_INS_BTCTR:
            case PPC_INS_BFCTR:
            case PPC_INS_BTCTRL:
            case PPC_INS_BFCTRL:
            case PPC_INS_BTL:
            case PPC_INS_BFL:
            case PPC_INS_BDNZTL:
            case PPC_INS_BDNZFL:
            case PPC_INS_BDZTL:
            case PPC_INS_BDZFL:
            case PPC_INS_BTLA:
            case PPC_INS_BFLA:
            case PPC_INS_BDNZTLA:
            case PPC_INS_BDNZFLA:
            case PPC_INS_BDZTLA:
            case PPC_INS_BDZFLA:
            case PPC_INS_BTLR:
            case PPC_INS_BFLR:
            case PPC_INS_BDNZTLR:
            case PPC_INS_BDZTLR:
            case PPC_INS_BDZFLR:
            case PPC_INS_BTLRL:
            case PPC_INS_BFLRL:
            case PPC_INS_BDNZTLRL:
            case PPC_INS_BDNZFLRL:
            case PPC_INS_BDZTLRL:
            case PPC_INS_BDZFLRL:
            // Trap instructions intentionally excluded for now:
            // case PPC_INS_TWLT:
            // case PPC_INS_TWEQ:
            // case PPC_INS_TWGT:
            // case PPC_INS_TWNE:
            // case PPC_INS_TWLLT:
            // case PPC_INS_TWLGT:
            // case PPC_INS_TWLTI:
            // case PPC_INS_TWEQI:
            // case PPC_INS_TWGTI:
            // case PPC_INS_TWNEI:
            // case PPC_INS_TWLLTI:
            // case PPC_INS_TWLGTI:
            // case PPC_INS_TDLT:
            // case PPC_INS_TDEQ:
            // case PPC_INS_TDGT:
            // case PPC_INS_TDNE:
            // case PPC_INS_TDLLT:
            // case PPC_INS_TDLGT:
            // case PPC_INS_TDLTI:
            // case PPC_INS_TDEQI:
            // case PPC_INS_TDGTI:
            // case PPC_INS_TDNEI:
            // case PPC_INS_TDLLTI:
            // case PPC_INS_TDLGTI:
                return true;
        }
        // Safety net: anything carrying a branch-condition code that slipped
        // through the table above is still treated as a branch.
        cs_ppc ppc = insn->detail->ppc;
        if (ppc.bc != ppc_bc::PPC_BC_INVALID) {
            LOG(WARNING) << "Hit a weird instruction with branch code that fell through, at: 0x" << std::hex << insn->address;
            return true;
        }
    }
    else {
        LOG(FATAL) << "Unsupported arch: " << m_arch;
    }
    return false;
}
// A symbol is "no exit" when it is in the known set of exit-style functions
// (calls that never return control to the caller).
bool Cfg::is_no_exit_sym(uint64_t sym_addr) const {
    return m_exit_funcs.count(sym_addr) != 0;
}
// Determine the number of entries in the switch table referenced by
// `cur_block`, by searching predecessor ("leader") blocks for the bounds
// check (CMP) metadata recorded during block construction. Falls back to
// per-architecture heuristics when no leader carries the metadata.
// Returns 0 when no bound could be recovered.
// NOTE(review): `switch_addr` is currently unused here — presumably kept for
// future per-table lookups; confirm before removing.
uint64_t Cfg::get_switch_bound(Block *cur_block, uint64_t switch_addr) {
    uint64_t bound = 0;
    if (!cur_block->leaders.size()) {
        LOG(ERROR) << "No leader blocks on switch table, failed to get switch bound";
        return bound;
    }
    // First choice: any direct leader block that recorded a CMP_LENGTH bound.
    for (const auto &leader_addr : cur_block->leaders) {
        auto leader_block = m_blocks.find(make_range(leader_addr));
        if (leader_block == m_blocks.end()) {
            LOG(FATAL) << "Invalid leader address 0x" << std::hex << leader_addr << " on block: 0x" << cur_block->start;
        }
        auto cmp_meta = leader_block->second.metadata.find(bb_metadata::CMP_LENGTH);
        if (cmp_meta == leader_block->second.metadata.end()) {
            continue;
        }
        if (!cmp_meta->second.value) {
            continue;
        }
        bound = cmp_meta->second.value;
        VLOG(VLOG_CFG) << "vtable bound val: 0x" << std::hex << bound;
        return bound;
    }
    // Fallbacks below: `bound` is always 0 here (non-zero bounds returned
    // above), so the !bound guards are defensive.
    if (!bound && m_arch == cs_arch::CS_ARCH_X86) {
        // Check for an and offset
        auto and_meta = cur_block->metadata.find(bb_metadata::AND_OFFSET);
        if (and_meta != cur_block->metadata.end()) {
            return and_meta->second.value;
        }
    }
    if (!bound && m_arch == cs_arch::CS_ARCH_ARM) {
        // ARM: the dispatch block itself may carry the CMP bound.
        auto cmp_meta = cur_block->metadata.find(bb_metadata::CMP_LENGTH);
        if (cmp_meta != cur_block->metadata.end()) {
            return cmp_meta->second.value;
        }
        return bound;
    }
    else if (!bound && m_arch == cs_arch::CS_ARCH_MIPS) {
        // do/while(false) is a breakout scope: any missing link in the
        // two-level leader walk bails to the final return.
        do {
            // With mips we can walk up two blocks to try and find our CMP meta tag.
            if (!cur_block->leaders.size()) {
                break;
            }
            auto leader_addr = cur_block->leaders.at(0);
            auto leader_block = m_blocks.find(make_range(leader_addr));
            if (leader_block == m_blocks.end()) {
                break;
            }
            if (!leader_block->second.leaders.size()) {
                break;
            }
            auto next_leader_addr = leader_block->second.leaders.at(0);
            auto next_leader_block = m_blocks.find(make_range(next_leader_addr));
            if (next_leader_block == m_blocks.end()) {
                break;
            }
            auto cmp_meta = next_leader_block->second.metadata.find(bb_metadata::CMP_LENGTH);
            if (cmp_meta == next_leader_block->second.metadata.end()) {
                return bound;
            }
            return cmp_meta->second.value;
        }
        while (false);
    }
    else {
        // NOTE(review): because the X86 fallback above is a separate `if`,
        // X86 also lands here (and logs) when it has no AND_OFFSET metadata —
        // looks intentional as a diagnostic, but confirm.
        LOG(WARNING) << "Failed to find backup architecture boundary for arch: " << static_cast<uint32_t>(m_arch);
    }
    return bound;
}
// Read one raw switch-table entry and normalize it: byte-swap on big-endian
// targets, then apply the encoding used by the table flavor.
//
// entry:       pointer to the start of the table in mapped memory.
// idx:         slot index to read.
// thumb_multi: TBB/TBH tables store offset/2 per the ARM spec; scale back up.
// shift_val:   left shift for scaled tables (e.g. aarch64 ADD-offset tables).
template<typename ENTRY, typename VAL>
VAL Cfg::read_switch_entry(const ENTRY *entry, uint32_t idx, bool thumb_multi, uint8_t shift_val) {
    ENTRY val = entry[idx];
    if (m_state->is_big_endian()) {
        // Cover every entry width callers instantiate (uint16_t for TBH,
        // 4- and 8-byte for the rest); single-byte entries need no swap.
        if (sizeof(ENTRY) == 2) {
            val = __builtin_bswap16(val);
        }
        else if (sizeof(ENTRY) == 4) {
            val = __builtin_bswap32(val);
        }
        else if (sizeof(ENTRY) == 8) {
            val = __builtin_bswap64(val);
        }
    }
    VAL read_val = 0;
    if (thumb_multi) {
        // Spec defines a *2 of any value in the table
        read_val = val * 2;
    }
    else if (shift_val) {
        read_val = val << shift_val;
    }
    else {
        read_val = val;
    }
    return read_val;
}
template<typename ENTRY, typename VAL>
int Cfg::get_switch_table(uint64_t table_addr, Block *parent_block) {
uint64_t cur_addr = table_addr;
VLOG(VLOG_SWT) << "Inspecting switch table: 0x" << std::hex << table_addr << " linked to block: 0x" << parent_block->start;
auto entry = reinterpret_cast<const ENTRY *>(m_memmap->addr_to_ptr(table_addr));
if (!entry) {
LOG(WARNING) << "Failed to find switch addr: 0x" << std::hex << table_addr;
return 1;
}
const MemPage *table_page = m_memmap->addr_to_page(table_addr);
CHECK(table_page) << "Failed to find table page: 0x" << std::hex << table_addr;
// Only track unique entries in the table.
uint32_t idx = 0;
uint64_t base_addr = table_addr;
// TODO: This needs to be much more modular
// In the future I would like to rip out this switch parser system and replace it with
// something that can be built ontop of the extracted metadata
uint8_t shift_left = 0;
bool thumb_mult = false;
// Check if we need to perform a shift inline.
if (m_arch == cs_arch::CS_ARCH_ARM64) {
auto add_meta = parent_block->metadata.find(bb_metadata::ADD_OFFSET);
if (add_meta == parent_block->metadata.end()) {
LOG(FATAL) << "Missing ADD_OFFSET in AARCH64 switch metadata";
}
// in aarch64 tables:
// case block_addr = (addr_of_first_physical_case + (vtable_val << shift_val))
shift_left = add_meta->second.scale;
base_addr = add_meta->second.value;
}
else if (m_arch == cs_arch::CS_ARCH_X86) {
if (parent_block->mode == cs_mode::CS_MODE_32) {
base_addr = 0x0;
}
else {
if (parent_block->metadata.count(bb_metadata::MOV_OFFSET)) {
auto load_meta = parent_block->metadata.find(bb_metadata::LOAD);
if (load_meta == parent_block->metadata.end()) {
LOG(FATAL) << "Failed to find vtable load value";
}
base_addr = load_meta->second.value;
}
}
}
else if (m_arch == cs_arch::CS_ARCH_ARM) {
if (parent_block->mode == cs_mode::CS_MODE_ARM) {
base_addr = 0;
}
else if (parent_block->mode == cs_mode::CS_MODE_THUMB) {
if (parent_block->metadata.count(bb_metadata::SWITCH_INDIRECT)) {
thumb_mult = false;
}
else {
// offsets are PC relative to the TBB/TBH insn, so block->end.
base_addr = parent_block->end;
thumb_mult = true;
}
}
}
else if (m_arch == cs_arch::CS_ARCH_MIPS) {
base_addr = 0;
}
std::set<VAL, std::greater<VAL>> table;
uint64_t table_bound = this->get_switch_bound(parent_block, table_addr);
if (!table_bound) {
LOG(WARNING) << "Failed to get switch table bound for block: 0x" << std::hex << parent_block->start;
return 1;
}
VLOG(VLOG_SWT) << "Discovered switch table size: 0x" << std::hex << table_bound;
while (true) {
auto read_val = this->read_switch_entry<ENTRY, VAL>(entry, idx, thumb_mult, shift_left);
auto cur_ptr = reinterpret_cast<const uint8_t *>(&entry[idx]);
cs_insn *insn;
uint32_t count = 0;
count = cs_disasm(m_cs_handle, cur_ptr, 0, cur_addr, 1, &insn);
if (count) {
// Break if we walk into a NOP instruction
if (is_nop(m_arch, insn)) {
cs_free(insn, count);
break;
}
}
cs_free(insn, count);
VLOG(VLOG_SWT) << "addr: 0x" << std::hex << cur_addr << " vtable value: " << read_val;
table.insert(read_val);
idx++;
cur_addr += sizeof(ENTRY);
auto next_val = this->read_switch_entry<ENTRY, VAL>(entry, idx, thumb_mult, shift_left);
// Stop if we hit an explicit end, found via inspecting leader blocks
if (table_bound && idx > table_bound) {
break;
}
// Stop if we hit a block:
if (m_blocks.count(make_range(cur_addr))) {
break;
}
// Stop if we walk into another vtable addr
if (m_state->get_switch_tables()->count(cur_addr)) {
break;
}
// Stop if we walk off the current page
if (cur_addr >= (table_page->address + table_page->size)) {
break;
}
// stop if the next value is a windows padding
if (sizeof(next_val == 4)) {
if (next_val == 0xcccccccc) {
break;
}
}
// On ARM64 we base off the first case in memory so 0x0 is allowed.
if (m_arch != cs_arch::CS_ARCH_ARM64) {
// Stop if its just pointing back to the vtable itself
if (next_val == 0x0) {
break;
}
}
// Simple check to see if we walked out of a table of negative offsets
if (!table_bound) {
if (read_val < 0) {
if (next_val > 0) {
break;
}
}
// Invert the previous if statement
else {
if (next_val < 0) {
break;
}
}
}
if (!m_memmap->is_text_sec(next_val + base_addr)) {
break;
}
// Use this as a fall back if we don't have table bound
if (!table_bound) {
// Check everything that is not SCAN
// LOG(INFO) << "Next addr: 0x" << std::hex << next_val + base_addr;
if (m_bitmap->get_bit(next_val + base_addr, (MapFlag::SCAN ^ MapFlag::ANY))) {
break;
}
}
}
uint64_t table_size = sizeof(entry[idx]) * idx;
// VLOG(VLOG_CFG) << "vtable bit range: 0x" << std::hex << switch_addr << " size: 0x" << table_size;
m_bitmap->set_bit_range(table_addr, table_size, MapFlag::SWITCH);
for (const auto &offset : table) {
uint64_t block_addr = offset + base_addr;
if(!m_memmap->is_text_sec(block_addr)) {
continue;
}
cs_mode new_mode = parent_block->mode;
if (m_arch == cs_arch::CS_ARCH_ARM && new_mode == cs_mode::CS_MODE_THUMB) {
if (block_addr % 2 != 0) {
block_addr -= 1;
}
}
auto check_addr_it = m_blocks.find(make_range(block_addr));
if (check_addr_it != m_blocks.end()) {
if (check_addr_it->first.first != block_addr) {
LOG(WARNING) << "Switch block: 0x" << std::hex << block_addr << " splits and existing block, boundary most likely wrong";
break;
}
else {
continue;
}
}
Block switch_block(block_addr);
switch_block.mode = new_mode;
if (m_arch == cs_arch::CS_ARCH_ARM) {
if (switch_block.start % 2 != 0) {
switch_block.mode = cs_mode::CS_MODE_THUMB;
}
}
switch_block.callers.emplace_back(parent_block->start);
switch_block.func_addr = parent_block->func_addr;
VLOG(VLOG_SWT) << "Found switch block: 0x" << std::hex << switch_block.start << " from block: 0x" << parent_block->start;
m_block_queue.push(switch_block);
if(!m_block_queue.empty()) {
this->process_block_queue();
}
}
return 0;
}
// Resolve every switch table recorded in the analysis state: validate each
// table address, then dispatch to get_switch_table<ENTRY, VAL> with the
// entry width appropriate for the architecture and the parent block's
// recorded metadata. Clears the state's table set when done.
void Cfg::process_switches() {
    VLOG(VLOG_SWT) << "Processing switch vtables";
    unsigned int obj_arch = m_obj->getArch();
    std::tuple<cs_arch, cs_mode> arch_tup = map_triple_cs(obj_arch);
    std::vector<uint64_t> tables_to_del;
    // Sanity check, make sure we remove any switch that exist in blocks due to switch metadata
    // failures.
    for (const auto &kv : *m_state->get_switch_tables()) {
        // Ignore bits outside of the text ranges.
        if (!m_bitmap->has_addr(kv.first)) {
            continue;
        }
        if (m_bitmap->get_bit(kv.first, MapFlag::BLOCK)) {
            LOG(ERROR) << "Switch table: 0x" << std::hex << kv.first << " exists in a block, flags: 0x" << static_cast<uint32_t>(m_bitmap->get_flag(kv.first));
            tables_to_del.emplace_back(kv.first);
        }
    }
    // Clean up bad tables.
    for (const auto &table : tables_to_del) {
        m_state->get_switch_tables()->erase(table);
    }
    // kv: table address -> address of the block that references the table.
    for (const auto &kv : *m_state->get_switch_tables()) {
        uint64_t switch_addr = kv.first;
        auto parent_block_it = m_blocks.find(make_range(kv.second));
        if (parent_block_it == m_blocks.end()) {
            LOG(FATAL) << "Bad parent block for switch vtable at: 0x" << std::hex << switch_addr;
        }
        // NOTE(review): this copies the Block, so any mutation inside
        // get_switch_table does not persist in m_blocks — confirm intended.
        auto parent_block = parent_block_it->second;
        if (m_arch == cs_arch::CS_ARCH_X86) {
            this->get_switch_table<int32_t, int32_t>(switch_addr, &parent_block);
        }
        else if (m_arch == cs_arch::CS_ARCH_ARM) {
            if (parent_block.mode == cs_mode::CS_MODE_THUMB) {
                // Entry width follows the dispatch instruction: TBH reads
                // halfwords, TBB bytes, indirect tables full words.
                if (parent_block.metadata.count(bb_metadata::TBH_INS)) {
                    this->get_switch_table<uint16_t, uint32_t>(switch_addr, &parent_block);
                }
                else if (parent_block.metadata.count(bb_metadata::TBB_INS)){
                    this->get_switch_table<uint8_t, uint32_t>(switch_addr, &parent_block);
                }
                else if (parent_block.metadata.count(bb_metadata::SWITCH_INDIRECT)) {
                    this->get_switch_table<int32_t, int32_t>(switch_addr, &parent_block);
                }
                else {
                    LOG(FATAL) << "Bad arm switch block!";
                }
            }
            else {
                this->get_switch_table<int32_t, int32_t>(switch_addr, &parent_block);
            }
        }
        else if (m_arch == cs_arch::CS_ARCH_ARM64) {
            // aarch64 entry width comes from the recorded load scale.
            auto load_meta = parent_block.metadata.find(bb_metadata::LOAD);
            if (load_meta == parent_block.metadata.end()) {
                LOG(FATAL) << "Missing LOAD metadata on aarch64 switch table";
            }
            if (load_meta->second.scale == 2) {
                this->get_switch_table<int16_t, int32_t>(switch_addr, &parent_block);
            }
            else if (load_meta->second.scale == 1) {
                this->get_switch_table<int8_t, int32_t>(switch_addr, &parent_block);
            }
            else if (load_meta->second.scale == 8) {
                this->get_switch_table<int64_t, int32_t>(switch_addr, &parent_block);
            }
            else {
                LOG(FATAL) << "Unsupported load scale: " << load_meta->second.scale;
            }
        }
        else if (m_arch == cs_arch::CS_ARCH_MIPS) {
            this->get_switch_table<int32_t, int32_t>(switch_addr, &parent_block);
        }
        else if (m_arch == cs_arch::CS_ARCH_PPC) {
            if (parent_block.mode & cs_mode::CS_MODE_32) {
                this->get_switch_table<int32_t, int32_t>(switch_addr, &parent_block);
            }
            else {
                LOG(FATAL) << "PPC64 switch tables unsupported";
            }
        }
        else {
            LOG(FATAL) << "unsupported arch for switch processing: " << m_arch;
            return;
        }
    }
    m_switches_found += m_state->get_switch_tables()->size();
    m_state->get_switch_tables()->clear();
}
// 8 word sizes
#define ARM_SAMPLE_COUNT 8
#define ARM_SAMPLE_SIZE (0x4 * ARM_SAMPLE_COUNT)
// Linearly disassemble each unexplored range ("hole"), feeding instructions
// to the ABI oracle to spot plausible function prologues. Discovered starts
// are pushed onto m_func_queue. Each hole's cur_addr records resume
// progress across calls; a cur_addr of 0 marks the hole exhausted.
// Returns the number of candidate blocks found this pass.
//
// holes: ranges to scan (mutated in place; cleared when all are exhausted).
// mode:  disassembly mode; CS_MODE_BIG_ENDIAN is a sentinel meaning
//        "unknown ARM/THUMB — sample each hole to pick a mode".
uint64_t Cfg::scan_and_queue(std::vector<MemRange> *holes, cs_mode mode) {
    CHECK(holes) << "Invalid holes pointer passed to scan_and_queue";
    uint64_t found = 0;
    cs_err err;
    csh cs_handle;
    // Set a default mode
    cs_mode tmp_mode = mode;
    if (tmp_mode == cs_mode::CS_MODE_BIG_ENDIAN) {
        tmp_mode = cs_mode::CS_MODE_ARM;
    }
    err = cs_open(m_arch, tmp_mode, &cs_handle);
    if (err != CS_ERR_OK) {
        LOG(FATAL) << "cs_open: " << cs_strerror(err);
    }
    cs_option(cs_handle, CS_OPT_DETAIL, CS_OPT_ON);
    cs_option(cs_handle, CS_OPT_SKIPDATA, CS_OPT_ON);
    AbiOracle oracle(m_arch, tmp_mode, m_bin_type);
    cs_insn *insn = cs_malloc(cs_handle);
    CHECK(insn) << "Failed to cs_malloc";
    MapFlag not_scan = MapFlag::BLOCK | MapFlag::SWITCH | MapFlag::READ;
    bool exhausted_holes = true;
    for (auto &hole : *holes) {
        // Lets not bother checking tiny holes.
        if (hole.size <= 4) {
            continue;
        }
        if (unlikely(!hole.cur_addr)) {
            continue;
        }
        // Skip holes with fewer than 4 bytes left. cur_addr is an absolute
        // address, so the remaining size is measured from the hole's end
        // (was `hole.size - hole.cur_addr`, which underflowed and never hit).
        if ((hole.cur_addr > hole.addr) && ((hole.addr + hole.size) - hole.cur_addr) <= 4) {
            continue;
        }
        exhausted_holes = false;
        uint64_t start_offset;
        const uint8_t *data_ptr = nullptr;
        uint64_t hole_size;
        uint64_t cur_addr = hole.cur_addr;
        uint64_t end_addr = hole.addr + hole.size;
        // VLOG(VLOG_LIN) << "Started walk at: 0x" << std::hex << hole.cur_addr;
        // Walk forward over any blocks we discovered in a previous pass
        for (; cur_addr < end_addr; cur_addr++) {
            if (!this->m_bitmap->get_bit(cur_addr, not_scan)) {
                if (m_arch == cs_arch::CS_ARCH_ARM) {
                    // short aligned for thumb
                    if (cur_addr % 2 != 0) {
                        continue;
                    }
                }
                else if (m_arch == cs_arch::CS_ARCH_MIPS || m_arch == cs_arch::CS_ARCH_ARM64) {
                    // Fixed 4-byte instruction words on these targets.
                    if (cur_addr % 4 != 0) {
                        continue;
                    }
                }
                start_offset = cur_addr - hole.addr;
                data_ptr = hole.ptr + start_offset;
                hole_size = hole.size - start_offset;
                // VLOG(VLOG_LIN) << "Ended walk at: 0x" << std::hex << cur_addr;
                break;
            }
        }
        if (!data_ptr) {
            hole.cur_addr = 0;
            continue;
        }
        VLOG(VLOG_LIN) << "Hole addr: 0x" << std::hex << cur_addr << " size: 0x" << hole_size;
        cs_mode swap_mode = mode;
        // If we are in a arm bin and not able to determine the mode, we need to sample the hole
        // Looking for possible signs of arm / thumb code.
        if (m_arch == cs_arch::CS_ARCH_ARM && mode == cs_mode::CS_MODE_BIG_ENDIAN) {
            if (hole_size < ARM_SAMPLE_SIZE) {
                VLOG(VLOG_LIN) << "skipping hole of size: 0x" << std::hex << hole_size << " because it is too small to sample";
                hole.cur_addr = 0;
                continue;
            }
            // Count words whose condition field is 0xE ("always"): a strong
            // marker of ARM (not THUMB) encoding.
            auto hole_words = reinterpret_cast<const uint32_t *>(data_ptr);
            uint8_t always_exec_bits = 0;
            for (uint8_t i = 0; i < ARM_SAMPLE_COUNT; i++) {
                if ((hole_words[i] >> 28) == 0xe) {
                    always_exec_bits++;
                }
            }
            if (always_exec_bits >= 6) {
                if (cur_addr % 4 != 0) {
                    VLOG(VLOG_LIN) << "Possible arm instructions, but not word aligned. Picking thumb";
                    swap_mode = cs_mode::CS_MODE_THUMB;
                    CHECK(cur_addr % 2 == 0) << "Invalid THUMB address, not word aligned, addr: 0x" << std::hex << cur_addr;
                }
                else {
                    VLOG(VLOG_LIN) << "Possible arm instructions";
                    swap_mode = cs_mode::CS_MODE_ARM;
                }
            }
            else {
                VLOG(VLOG_LIN) << "Lack of arm instructions, picking thumb";
                swap_mode = cs_mode::CS_MODE_THUMB;
                CHECK(cur_addr % 2 == 0) << "Invalid THUMB address, not word aligned, addr: 0x" << std::hex << cur_addr;
            }
            cs_option(cs_handle, CS_OPT_MODE, swap_mode);
            oracle.change_mode(swap_mode);
        }
        // Use for arm starts of either str or push,
        // Kinda ugly way to handle it but with arm we need a defined starting point past any imm references
        bool arm_save_hit = false;
        bool skip_delay = false;
        while (cs_disasm_iter(cs_handle, &data_ptr, &hole_size, &cur_addr, insn)) {
            // Skip small end blocks
            if ((end_addr - insn->address) <= 4) {
                hole.cur_addr = 0; // exhausted
                break;
            }
            // when we walk into another block, bail out of disassembly loop,
            // Next time this function is called it will do a cheaper walk over all the blocks until is hits a new hole.
            if (this->m_bitmap->get_bit(insn->address, not_scan)) {
                hole.cur_addr = insn->address;
                break;
            }
            // MIPS: the instruction after a reset sits in the delay slot of
            // the resetting branch; skip it.
            if (skip_delay) {
                skip_delay = false;
                continue;
            }
            abi_stat status = oracle.update_insn(insn);
            if (status == abi_stat::CONTINUE) {
                continue;
            }
            else if (status == abi_stat::RESET) {
                VLOG(VLOG_LIN) << "-- Oracle reset at: 0x" << std::hex << insn->address;
                oracle.reset();
                if (m_arch == cs_arch::CS_ARCH_MIPS) {
                    skip_delay = true;
                }
                continue;
            }
            else if (status == abi_stat::FOUND) {
                uint64_t start_addr = oracle.get_start_addr();
                VLOG(VLOG_LIN) << "Found possible block at: 0x" << std::hex << start_addr << " on addr: 0x" << insn->address;
                Block scan_block(start_addr);
                scan_block.mode = swap_mode;
                m_func_queue.push(scan_block);
                m_sweep_funcs_found += 1;
                found++;
                break;
            }
            else if (status == abi_stat::INVALID) {
                uint64_t start_addr = oracle.get_start_addr();
                VLOG(VLOG_LIN) << "Invalid function ABI at 0x" << std::hex << start_addr << " on addr: 0x" << insn->address;
                break;
            }
            else {
                LOG(FATAL) << "INVALID ORACLE STATUS: " << static_cast<int>(status);
            }
        }
        uint64_t next_addr = insn->address + insn->size;
        // Check if we are done with this hole.
        if (next_addr > (hole.addr + hole.size)) {
            hole.cur_addr = 0;
        }
        else {
            VLOG(VLOG_LIN) << "Next check address 0x" << std::hex << next_addr;
            hole.cur_addr = next_addr;
        }
        if (hole_size == 0) {
            hole.cur_addr = 0;
        }
        if (hole.cur_addr) {
            CHECK(hole.cur_addr >= hole.addr && hole.cur_addr < (hole.addr + hole.size)) << "Invalid hole skip addr: 0x" << std::hex << hole.cur_addr;
        }
        oracle.reset();
    }
    cs_free(insn, 1);
    cs_close(&cs_handle);
    if (exhausted_holes) {
        holes->clear();
    }
    return found;
}
void Cfg::do_linear_scan() {
// Skip scanning on bins with no blocks created up to this point. PE files like COM or CLR's
// will have a valid .text but will have no entries exposed. Sweeping through that can cause false positives.
if (m_blocks.empty()) {
LOG(WARNING) << "No blocks in CFG, skipping linear sweep";
return;
}
LOG(INFO) << "Performing linear sweep";
uint64_t scan_blocks = 0;
unsigned int obj_arch = m_obj->getArch();
std::tuple<cs_arch, cs_mode> arch_tup = map_triple_cs(obj_arch);
cs_mode mode = std::get<1>(arch_tup);
// Attempt to determine if the current binary is all ARM or all THUMB
if (m_arch == cs_arch::CS_ARCH_ARM) {
// This step could be done inline while creating blocks,
// if the perf is needed lets perform the operations there.
uint64_t arm_blocks = 0;
uint64_t thumb_blocks = 0;
for (const auto &block_kv : m_blocks) {
if (block_kv.second.mode == cs_mode::CS_MODE_ARM) {
arm_blocks++;
}
else if (block_kv.second.mode == cs_mode::CS_MODE_THUMB) {
thumb_blocks++;
}
else {
LOG(FATAL) << "Invalid block mode: 0x" << std::hex << block_kv.second.start << " mode: " << static_cast<int>(block_kv.second.mode);
}
}
// All arm
if (arm_blocks > 0 && thumb_blocks == 0) {
VLOG(VLOG_LIN) << "All arm blocks, using arm mode for sweep";
mode = cs_mode::CS_MODE_ARM;
}
// All thumb
else if (arm_blocks == 0 && thumb_blocks > 0) {
VLOG(VLOG_LIN) << "All thumb blocks, using thumb mode for sweep";
mode = cs_mode::CS_MODE_THUMB;
}
// Mixed state
else if (arm_blocks > 0 && thumb_blocks > 0) {
LOG(WARNING) << "Mixed thumb / arm binary";
mode = cs_mode::CS_MODE_BIG_ENDIAN; // default out.
}
else {
LOG(FATAL) << "Invalid logic in thumb/arm block calc";
}
}
std::vector<MemRange> holes = this->m_bitmap->get_unset_ranges(m_memmap.get(), MapFlag::ANY);
if (holes.empty()) {
VLOG(VLOG_LIN) << "Failed to find any code holes";
return;
}
do {
scan_blocks = this->scan_and_queue(&holes, mode);
if (holes.empty()) {
VLOG(VLOG_LIN) << "Exhusted all bitmap holes";
break;
}
VLOG(VLOG_LIN) << "Found " << scan_blocks << " blocks from scanning holes" << " holes left: " << holes.size();
this->process_func_queue();
if(!m_block_queue.empty()) {
this->process_block_queue();
}
} while(true);
return;
}
// Post-CFG cleanup pass over all recovered blocks:
//  1. clears the per-block analysis metadata,
//  2. symbolizes THUMB "bx pc" bridge stubs to their ARM PLT targets,
//  3. on COFF binaries, symbolizes single-`jmp` IAT thunk blocks to match
//     the imported symbol they forward to.
// Returns 0 on success, 1 on a capstone setup failure.
int Cfg::post_process_blocks() {
    // Check for any split blocks within the block
    // Removes blocks that are standard splits, and kept all blocks that split instructions
    // Also mark the instruction splitting blocks for later analysis.
    //
    // TODO: This invalidates some leader/follower/callers, it might be required to make another follow up
    // pass to fix up those. Or make a generic lookup function that checks first the main blocks for a target
    // then the split targets map.
    for (auto it = m_blocks.begin(); it != m_blocks.end(); ++it) {
        auto &kv = *it;
        // clear out metadata used in the CFG
        kv.second.metadata.clear();
        // Find thumb bridges to the plt
        if (kv.second.mode == cs_mode::CS_MODE_THUMB) {
            // Check for the bx pc block,
            uint64_t block_size = kv.second.end - kv.second.start;
            // A THUMB bridge stub is exactly one 2-byte instruction.
            if (block_size == 2) {
                cs_insn *insn;
                cs_option(m_cs_handle, CS_OPT_DETAIL, CS_OPT_ON);
                cs_option(m_cs_handle, CS_OPT_MODE, kv.second.mode);
                uint32_t count = 0;
                count = cs_disasm(m_cs_handle, kv.second.data, block_size, kv.second.start, 1, &insn);
                if (!count) {
                    // Nothing decoded; cs_disasm allocated nothing to free.
                    continue;
                }
                if (insn[0].id != ARM_INS_BX) {
                    cs_free(insn, count);
                    continue;
                }
                cs_arm arm = insn[0].detail->arm;
                if (arm.op_count != 1) {
                    cs_free(insn, count);
                    continue;
                }
                if (arm.operands[0].type != ARM_OP_REG || arm.operands[0].reg != ARM_REG_PC) {
                    cs_free(insn, count);
                    continue;
                }
                cs_free(insn, count);
                // Ok found bx pc, now check the follower block:
                if (kv.second.followers.size() != 1) {
                    // Non fatal error here because a 'bx pc' can be a tail call, indicated by a branch_target being filled in.
                    if (kv.second.branch_target) {
                        continue;
                    }
                    else {
                        LOG(FATAL) << "Invalid bx pc ARM block in post processing";
                    }
                }
                auto follow_block = m_blocks.find(make_range(kv.second.followers.at(0)));
                if (follow_block == m_blocks.end()) {
                    continue;
                }
                // The bridge must land in an ARM-mode trampoline.
                if (follow_block->second.mode != cs_mode::CS_MODE_ARM) {
                    LOG(FATAL) << "Invalid following block for a bx pc block";
                }
                const Symbol *sym;
                if (!m_resolver->resolve_sym(follow_block->second.branch_target, &sym)) {
                    continue;
                }
                // Ok we found a thumb bridge, now symbolize the current iterated block to be a valid symbol
                // Todo: invalidate the old symbol?
                m_resolver->add_symbol(kv.second.start, *sym);
                VLOG(VLOG_CFG) << "New thumb bridge symbol: 0x" << std::hex << kv.second.start << " : " << sym->name;
            }
        }
    }
    // Find any blocks that are just jmp to IAT entry blocks and symbolize them to match their targets.
    if (this->m_obj->isCOFF()) {
        unsigned int obj_arch = m_obj->getArch();
        std::tuple<cs_arch, cs_mode> arch_tup = map_triple_cs(obj_arch);
        cs_mode mode = std::get<1>(arch_tup);
        cs_err err;
        csh cs_handle;
        err = cs_open(m_arch, mode, &cs_handle);
        if (err != CS_ERR_OK) {
            LOG(ERROR) << "cs_open: " << cs_strerror(err);
            return 1;
        }
        cs_option(cs_handle, CS_OPT_DETAIL, CS_OPT_ON);
        for (auto &kv : m_blocks) {
            uint64_t block_size = kv.second.end - kv.second.start;
            cs_insn *insn = cs_malloc(cs_handle);
            const uint8_t *data_ptr = kv.second.data;
            uint64_t tmp_block_addr = kv.second.start;
            // Only grab one instruction.
            while (cs_disasm_iter(cs_handle, &data_ptr, &block_size, &tmp_block_addr, insn)) {
                break;
            }
            // NOTE(review): this only guards the cs_malloc result; if
            // cs_disasm_iter decoded nothing, `insn` fields are unset below —
            // confirm whether a decode-success flag should be checked instead.
            if (!insn) {
                continue;
            }
            if (m_arch == cs_arch::CS_ARCH_X86) {
                if (insn->id != X86_INS_JMP) {
                    cs_free(insn, 1);
                    continue;
                }
            }
            // This is x86 only, TODO refactor with better get_op_val;
            std::vector<uint64_t> imms = get_imm_vals(*insn, m_arch, 0, 0);
            for (const auto &imm : imms) {
                const Symbol *sym;
                if (!m_resolver->resolve_sym(imm, &sym)) {
                    continue;
                }
                if (!sym) {
                    continue;
                }
                m_resolver->add_symbol(insn->address, *sym);
                kv.second.jump_block = true;
                VLOG(VLOG_CFG) << "New jump addr: 0x" << std::hex << insn->address << " : " << sym->name;
                // Only grab the first imm value, if there are more than one it's most likely a capstone issue.
                break;
            }
            cs_free(insn, 1);
        }
        cs_close(&cs_handle);
    }
    return 0;
}
// Dispatch __libc_start_main argument inspection to the architecture-specific
// handler. Only PPC is implemented at the moment; every other architecture
// yields CfgErr::OTHER.
CfgRes<uint64_t> Cfg::libc_start_main_helper(cs_insn *insn, Block *block) const {
    if (m_arch == cs_arch::CS_ARCH_PPC) {
        return libc_start_main_ppc(insn, block);
    }
    return CfgRes<uint64_t>(CfgErr::OTHER);
}
// Recover the address of main() from a PPC32 __libc_start_main call site.
// Primary path: R8 holds a pointer to the GLIBC startup_info struct whose
// second 32-bit word is the main() pointer. Fallback: R3 holds main()
// directly (classic calling convention). PPC64 is intentionally skipped.
// FIX: the failure log below previously mentioned R13 although the register
// actually queried is R8.
CfgRes<uint64_t> Cfg::libc_start_main_ppc(cs_insn *insn, Block *block) const {
    if (block->mode & cs_mode::CS_MODE_32) {
        auto startup_info_ptr_res = m_state->get_reg_val(ppc_reg::PPC_REG_R8); // Assume standard ABI and GLIBC start_main
        if (!startup_info_ptr_res) {
            VLOG(VLOG_CFG) << "Failed to get R8 register for startup_info arg at: 0x" << std::hex << insn->address;
            // R8 unknown: fall back to R3, which holds main() directly in the
            // standard libc_start_main convention.
            auto r3_res = m_state->get_reg_val(ppc_reg::PPC_REG_R3);
            if (r3_res) {
                VLOG(VLOG_CFG) << "Assuming r3, standard libc_start_main arg is main ptr";
                if (!m_memmap->is_text_sec(*r3_res)) {
                    LOG(WARNING) << "stinfo->main ptr not in text segment at: 0x" << std::hex << insn->address << " ptr: 0x" << *r3_res;
                    return CfgRes<uint64_t>(CfgErr::OTHER);
                }
                return CfgRes<uint64_t>(*r3_res);
            }
            return CfgRes<uint64_t>(CfgErr::NO_REG);
        }
        // Translate the startup_info address to a host pointer for reading.
        auto stinfo_ptr = reinterpret_cast<const uint32_t *>(m_memmap->addr_to_ptr(*startup_info_ptr_res));
        if (!stinfo_ptr) {
            LOG(WARNING) << "Failed read stinfo ptr at: 0x" << std::hex << insn->address;
            return CfgRes<uint64_t>(CfgErr::BAD_READ);
        }
        // stinfo[1] is the main() pointer; byte-swap for big-endian targets.
        auto main_val = stinfo_ptr[1];
        if (block->mode & cs_mode::CS_MODE_BIG_ENDIAN) {
            main_val = __builtin_bswap32(main_val);
        }
        if (!m_memmap->is_text_sec(main_val)) {
            LOG(WARNING) << "stinfo->main ptr not in text segment at: 0x" << std::hex << insn->address << " ptr: 0x" << main_val;
            return CfgRes<uint64_t>(CfgErr::OTHER);
        }
        return CfgRes<uint64_t>(main_val);
    }
    else {
        LOG(WARNING) << "Skipping PPC64 libc_start_main inspection";
    }
    return CfgRes<uint64_t>(CfgErr::OTHER);
}
|
<reponame>TartuNLP/bertnernazgul
from pathlib import Path
from configparser import ConfigParser

# Load model locations from the shared configuration file.
_config = ConfigParser()
_config.read("config/config.ini")

STANZA_PATH = _config['models']['stanza']  # Stanza pipeline directory
BERT_PATH = _config['models']['bert']      # BERT NER model directory

# Ensure the log directory exists before any logger writes into it.
# FIX: the original used Path("logs/").parents[0].mkdir(...), but
# .parents[0] of "logs/" is "." — so logs/ itself was never created.
Path("logs").mkdir(parents=True, exist_ok=True)
|
# Build pbrt out-of-tree, then render the staircase path-tracing test scene.
set -e
mkdir -p build
cd build
cmake ..
# 20 parallel compile jobs.
make -j20
# Render the test scene (path-traced staircase variant).
./pbrt ../scenes/sure_test_scenes/staircase/scene_pt.pbrt
|
#!/bin/sh
# This script will automatically join the mesh network at PPS.
# Assuming you run Babel and AHCP, you can probably adapt it to your own
# network by tweaking the following parameters:
essid="pps"
channel=6

# die MESSAGE... -- print to stderr and abort.
die() {
  echo "$@" >&2
  exit 1
}

# findcmd CMD [PACKAGE] -- abort unless CMD is on PATH.
findcmd() {
  type "$1" > /dev/null || \
  die "Couldn't find $1, please install ${2:-it} or fix your path."
}

# Interface configuration requires root.
if [ "$(whoami)" != root ]; then
  die "Sorry, you need to be root."
fi

usage="Usage: $0 [-N] [-d debuglevel] interface"
debuglevel=0
nodns=
# -d LEVEL sets the daemons' debug level; -N disables DNS configuration.
while getopts "Nd:" name
do
  case $name in
    d) debuglevel="$OPTARG";;
    N) nodns=1;;
    ?) die "$usage"
  esac
done
shift $(($OPTIND - 1))

[ $# -lt 1 ] && die "$usage"
# NOTE(review): "$@" assigned to a scalar joins the arguments; interfaces
# with spaces in their names would break here — assumed not to occur.
interfaces="$@"

findcmd iwconfig "wireless-tools"
findcmd babeld
findcmd ahcpd

modprobe ipv6
# NOTE(review): no background jobs have been started at this point, so this
# 'wait' is effectively a no-op — confirm whether it is intentional.
wait
(ip -6 addr show dev lo | grep -q 'inet6') || \
  die "No IPv6 address on lo, please modprobe ipv6"

# Configure every interface for ad-hoc mode on the mesh ESSID/channel.
while [ "$1" != "" ] ; do
  interface="$1"
  # order is important for mac80211-based drivers
  ifconfig "$interface" down || die "Couldn't configure interface"
  iwconfig "$interface" mode ad-hoc || die "Couldn't configure interface"
  ifconfig "$interface" up || die "Couldn't configure interface"
  iwconfig "$interface" essid "$essid" channel $channel || \
    die "Couldn't configure interface"
  ip link set up dev "$interface" || die "Couldn't up interface"
  shift
done

# Cleanup handler: stop both daemons, letting ahcpd release its lease first.
terminate() {
  echo -n 'Killing ahcpd and babel...'
  [ -e /var/run/ahcpd.pid ] && kill "$(cat /var/run/ahcpd.pid)"
  # give ahcpd time to release the lease
  sleep 2
  [ -e /var/run/babeld.pid ] && kill "$(cat /var/run/babeld.pid)"
  echo 'done.'
  trap - EXIT
  exit 0
}

trap terminate INT QUIT TERM EXIT

# allow time for the link layer to associate
sleep 1
# Start routing (babeld) and autoconfiguration (ahcpd) daemons.
babeld ${debuglevel:+-d} $debuglevel -z 3 $interfaces &
ahcpd ${nodns:+-N} ${debuglevel:+-d} $debuglevel $interfaces &
echo "Ahcpd and babeld running on $interfaces, press ^C to terminate."
# Idle forever; ^C triggers terminate() via the trap.
while :; do sleep 3600; done
|
# Spec dependencies: the RSpec framework and the library under test.
require 'rspec'
require_relative '../lib/spotify-playlist'
|
import React from 'react'
import Icon from 'react-native-vector-icons/MaterialIcons'
//import all the components we are going to use
import { StyleSheet, View, Text, ScrollView, Animated } from 'react-native';
export default function Tabs({ translateY }) {
return (
<Animated.View style={[styles.container, {
transform: [{
translateY: translateY.interpolate({
inputRange: [0, 380],
outputRange: [0, 30],
extrapolate: 'clamp',
}),
}],
opacity: translateY.interpolate({
inputRange: [0, 380],
outputRange: [1, 0.1],
extrapolate: 'clamp',
}),
}]}>
<ScrollView contentContainerStyle={styles.tabsContainer} horizontal={true} showHorizontalIndicator={false} >
<View style={styles.tabItem}>
<Icon name='person-add' size={20} color='#fff' />
<Text style={styles.text}>Indicar Amigos</Text>
</View>
<View style={styles.tabItem}>
<Icon name='chat-bubble-outline' size={20} color='#fff' />
<Text style={styles.text}>Cobrar</Text>
</View>
<View style={styles.tabItem}>
<Icon name='arrow-downward' size={20} color='#fff' />
<Text style={styles.text}>Depositar</Text>
</View>
<View style={styles.tabItem}>
<Icon name='arrow-upward' size={20} color='#fff' />
<Text style={styles.text}>Transferir</Text>
</View>
<View style={styles.tabItem}>
<Icon name='lock' size={20} color='#fff' />
<Text style={styles.text}>Bloquear Cartão</Text>
</View>
</ScrollView>
</Animated.View>
)
}
// Styles for the tab bar container and the individual action cards.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    height: 100,
    marginTop: 20,
    paddingRight: 0,
    paddingBottom: 30
  },
  // Inner padding of the horizontal ScrollView content.
  tabsContainer: {
    paddingLeft: 10,
    paddingRight: 20
  },
  // One translucent square card per action.
  tabItem: {
    width: 100,
    height: 100,
    backgroundColor: 'rgba(255, 255, 255, 0.2)',
    borderRadius: 3,
    marginLeft: 10,
    padding: 10,
    justifyContent: 'space-between'
  },
  text: {
    fontSize: 13,
    color: '#fff'
  }
});
|
package com.momo.mapper.res.aclmanager;

import com.momo.mapper.dataobject.RoleDO;
import lombok.*;

/**
 * @program: momo-cloud-permission
 * @description: Page-list response item for system roles; extends RoleDO
 *               with per-row UI button-visibility flags.
 * @author: <NAME>
 * @create: 2019-08-05 22:00
 **/
@Getter
@Setter
@ToString
@Builder
// NOTE(review): @Builder on a subclass does not expose the inherited RoleDO
// fields through the builder — confirm this is intended.
@NoArgsConstructor
@AllArgsConstructor
//@EqualsAndHashCode(callSuper = true, of = {"id"})
public class SysRolePageListRes extends RoleDO {
    // Whether the edit button is shown for this row.
    private boolean editButtonShow = true;
    // Whether the authorization (grant) button is shown for this row.
    private boolean authorButtonShow = true;
    // Whether the enable/disable status button is shown for this row.
    private boolean disabledFlagButtonShow = true;
}
|
<filename>app.js<gh_stars>0
'use strict';
//GLOBAL VARIABLES
// All Store instances created so far (seeded below and via the form).
var salmonStore = [];
// Returns a random float in [min, max) — upper bound exclusive.
var _random = function getRandomArbitrary(min, max) {
  return Math.random() * (max - min) + min;
};
//FUNCTIONS
//Constructor Function that builds a Store.
var Store = function (name, min_customers, max_customers, avg_cookies_per_customer, open_time, close_time) {
this.name = name;
this.min_customers = min_customers;
this.max_customers = max_customers;
this.avg_cookies_per_hour = [];
this.time = [];
this.avg_cookies_per_customer = avg_cookies_per_customer;
this.open_time = open_time;
this.close_time = close_time;
};
//Method of Store Constructor Function -- fills avg_cookies_per_hour with 14
//simulated hourly sales totals (random customer count times the average
//cookies per customer, floored to whole cookies).
// FIX: the original floored avg_cookies_per_customer *before* multiplying,
// which discarded the fractional part of the average (e.g. 1.2 -> 1); the
// product is now computed first and then floored.
// NOTE(review): the constant 14 assumes every store is open 14 hours —
// confirm against open_time/close_time.
Store.prototype.number_cookies_per_hour = function () {
  for (var i = 0; i < 14; i++) {
    var number_customers = Math.floor(_random(this.min_customers, this.max_customers));
    var cookies = Math.floor(number_customers * this.avg_cookies_per_customer);
    this.avg_cookies_per_hour.push(cookies);
  }
};
//Method of Store Constructor Function -- records every whole hour the store
//is open, from open_time (inclusive) up to close_time (exclusive).
Store.prototype.calculateTime = function () {
  var hoursOpen = this.close_time - this.open_time;
  for (var hour = 0; hour < hoursOpen; hour++) {
    this.time.push(this.open_time + hour);
  }
};
//Stand Alone Function -- rebuilds the #store-table element from scratch:
//a header row of open hours, then one row per store with its hourly cookie
//counts and a per-store total column.
// FIX: the original contained 'currentAmount === 0;', a no-op comparison
// where an assignment was intended; the variable is now initialized where it
// is used. A leftover debug console.log was also removed.
var create_table = function () {
  document.getElementById('store-table').innerHTML = '';
  var target = document.getElementById("store-table");
  var table_row = document.createElement("tr");
  //top row: blank corner cell
  var store_table_element = document.createElement("td");
  store_table_element.textContent = '';
  table_row.appendChild(store_table_element);
  //adding each hour that the stores are open, left to right
  //(hours are read from the first store; all stores share the same hours)
  for (var p = 0; p < 14; p++) {
    var store_table_element_2 = document.createElement("td");
    store_table_element_2.textContent = salmonStore[0].time[p];
    table_row.appendChild(store_table_element_2);
  }
  //total heading
  var store_table_element_3 = document.createElement("td");
  store_table_element_3.textContent = 'Total';
  table_row.appendChild(store_table_element_3);
  target.appendChild(table_row);
  //one row per store location
  for (var h = 0; h < salmonStore.length; h++) {
    var table_row_2 = document.createElement('tr');
    //store name, first column
    var store_table_element_4 = document.createElement("td");
    store_table_element_4.textContent = salmonStore[h].name;
    table_row_2.appendChild(store_table_element_4);
    //hourly counts plus a running total for this store
    var runningTotal = 0;
    for (var q = 0; q < 14; q++) {
      var store_table_element_5 = document.createElement('td');
      store_table_element_5.textContent = salmonStore[h].avg_cookies_per_hour[q];
      table_row_2.appendChild(store_table_element_5);
      var currentAmount = salmonStore[h].avg_cookies_per_hour[q];
      runningTotal = runningTotal + currentAmount;
    }
    //per-store total, last column
    var store_table_element_6 = document.createElement('td');
    store_table_element_6.textContent = runningTotal;
    table_row_2.appendChild(store_table_element_6);
    target.appendChild(table_row_2);
  }
};
//Stand Alone Function -- creates the table footer that shows the totals
var create_table_footer = function () {
var target = document.getElementById('store-table');
var table_footer_row = document.createElement('tr');
var store_footer_element = document.createElement('td');
//total, first column
store_footer_element.textContent = 'Total';
table_footer_row.appendChild(store_footer_element);
//stores totals by hour for all stores
for (var s = 0; s < 14; s++) {
var totalForEachHour = 0;
var grandTotal = 0;
var store_footer_element_2 = document.createElement('td');
for (var g = 0; g < salmonStore.length; g++) {
totalForEachHour = salmonStore[g].avg_cookies_per_hour[s];
grandTotal = totalForEachHour + grandTotal;
}
store_footer_element_2.textContent = grandTotal;
table_footer_row.appendChild(store_footer_element_2);
}
//total of the totals
var store_footer_element_3 = document.createElement('td');
var sum_of_array = 0;
var total_of_array= 0;
for (var t = 0; t < 14; t++) {
//stores each hour in a different element - left to right
for (var f = 0; f < salmonStore.length; f++) {
sum_of_array = salmonStore[f].avg_cookies_per_hour[t];
total_of_array = sum_of_array + total_of_array;
}
}
store_footer_element_3.textContent = total_of_array;
table_footer_row.appendChild(store_footer_element_3);
//attach to row
target.appendChild(table_footer_row);
};
//Stand Alone Function - builds a Store, simulates its hours and hourly
//sales, and registers it in the global salmonStore list.
var createStore = function (name, min_customers, max_customers, avg_cookies_per_customer, open_time, close_time) {
  var store = new Store(name, min_customers, max_customers, avg_cookies_per_customer, open_time, close_time);
  store.calculateTime();
  store.number_cookies_per_hour();
  salmonStore.push(store);
};
//Stand Alone Function -- add a new store and recreate the table.
//This function accepts input from a form the viewer sees on the sales page
//and processes the information collected to create a new store.
var handle_form_input = function (form_event) {
  // clears the default form-submit behavior (page reload)
  form_event.preventDefault();
  //Assigns variable names to all the content that came in with the form.
  // NOTE(review): form field values are strings; the numeric fields are not
  // converted here — confirm downstream arithmetic tolerates string inputs.
  var name = form_event.target.storeName.value;
  var min_customers = form_event.target.minCust.value;
  var max_customers = form_event.target.maxCust.value;
  var open_time = form_event.target.openTime.value;
  var close_time = form_event.target.closeTime.value;
  var avg_cookies_per_customer = form_event.target.cookiesPerCustomer.value;
  //creates an instance of the newStore object and simulates its data
  var newStore = new Store(name, min_customers, max_customers, avg_cookies_per_customer, open_time, close_time);
  newStore.calculateTime();
  newStore.number_cookies_per_hour();
  //newStore.sum_cookies();
  salmonStore.push(newStore);
  //rebuild table including the new store
  create_table();
  create_table_footer();
};
//MAIN BODY OF CODE
//Seed the five known locations: (name, min/max customers per hour,
//avg cookies per customer, open hour, close hour), then render the table.
createStore('First and Pike', 23, 65, 6.3, 6, 20);
createStore('SeaTac Airport', 3, 24, 1.2, 6, 20);
createStore('Seattle Center', 11, 38, 3.7, 6, 20);
createStore('Capitol Hill', 20, 38, 2.3, 6, 20);
createStore('Alki', 6, 16, 4.6, 6, 20);
create_table();
create_table_footer();
//LISTENER
//accesses a listener on the browser - this listener is only activated within the <form> part of the html, because this is where 'input' is tied to the 'submit'id. Once the event is triggered the function handle_form_input will begin.
//Attaches what we are doing to the element that includes the id "new-store-form".
var store_form = document.getElementById('new-store-form');
//The listener
store_form.addEventListener('submit', handle_form_input);
|
<filename>kdl_wagtail/zotero/migrations/0008_add_verbose_name_to_bibliography_fields.py<gh_stars>1-10
# Generated by Django 2.2.1 on 2019-05-16 10:26
# Auto-generated migration: attaches human-readable verbose names to the
# Bibliography rich-text fields (bib, citation, citation_short).
from django.db import migrations
import wagtail.core.fields


class Migration(migrations.Migration):

    dependencies = [
        ('kdl_wagtail_zotero', '0007_bibliography_citation_short'),
    ]

    operations = [
        migrations.AlterField(
            model_name='bibliography',
            name='bib',
            field=wagtail.core.fields.RichTextField(verbose_name='bibliography entry'),
        ),
        migrations.AlterField(
            model_name='bibliography',
            name='citation',
            field=wagtail.core.fields.RichTextField(verbose_name='note'),
        ),
        migrations.AlterField(
            model_name='bibliography',
            name='citation_short',
            field=wagtail.core.fields.RichTextField(null=True, verbose_name='shortnote'),
        ),
    ]
|
class GraphConverter:
    """Skeleton converter that reshapes raw data into ECharts graph format.

    NOTE(review): get_graph and fill_graph are unimplemented stubs and
    currently return None — confirm intended behavior before relying on run().
    """

    def __init__(self, data):
        # Raw input data to be transformed into the ECharts structure.
        self.data = data

    def get_graph(self):
        """
        Retrieves the graph data from the class.
        """

    def fill_graph(self):
        """
        Fills the graph data with the required format for ECharts.
        """
        # Your solution code here to transform self.data into ECharts format

    def run(self):
        """
        Executes the process of getting and filling the graph data and returns the transformed data.
        """
|
#! /bin/sh
# Copyright (C) 2011-2014 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Try to find the gettext '.m4' files and make them easily accessed
# to the test cases requiring them.
# See also automake bug#9807.
. test-init.sh

# Print the first dotted version number found in "CMD --version" output.
extract_program_version ()
{
  "$1" --version | sed 1q | $PERL -ne '/(\d(?:\.\d+)+)/ and print "$1\n"'
}

# get.sh is sourced by other tests to make the gettext .m4 files reachable.
echo "# Automatically generated by $me." > get.sh
echo : >> get.sh

# The 'gettextize' and 'autopoint' scripts will look into Makefile.am.
echo ACLOCAL_AMFLAGS = -I m4 > Makefile.am

# Prefer autopoint to gettextize, since the more modern versions of the
# latter might unconditionally require user interaction to complete;
# yes, this means confirmation from /dev/tty (!) -- see:
# <http://lists.gnu.org/archive/html/bug-gettext/2011-12/msg00000.html>
# Since this "forced interaction" behaviour of gettextize wasn't present
# before the introduction of autopoint, we should be able to safely
# fall back to calling gettextize non-interactively if autopoint is not
# present.
if autopoint --version; then
  am_gettextize_command=autopoint
else
  am_gettextize_command=gettextize
fi

# We will need to specify the correct autopoint (or gettextize) version
# in the AM_GNU_GETTEXT_VERSION call in configure.ac if we want autopoint
# (or gettextize) to setup the correct infrastructure -- in particular,
# for what concerns us, to bring in all the required .m4 files.
autopoint_version=$(extract_program_version $am_gettextize_command) \
  && test -n "$autopoint_version" \
  || autopoint_version=0.10.35

cat > configure.ac <<END
AC_INIT([foo], [1.0])
AC_PROG_CC
# Both required by autopoint.
AM_GNU_GETTEXT
AM_GNU_GETTEXT_VERSION([$autopoint_version])
END

# If gettextizing succeeds, export the local m4 dir through ACLOCAL_PATH.
if $am_gettextize_command --force && test -f m4/gettext.m4; then
  echo "ACLOCAL_PATH='$(pwd)/m4':\$ACLOCAL_PATH" >> get.sh
  echo "export ACLOCAL_PATH" >> get.sh
else
  # Older versions of gettext might not have a gettextize program
  # available, but this doesn't mean the user hasn't made the gettext
  # macros available, e.g., by properly setting ACLOCAL_PATH.
  rm -rf m4
  mkdir m4
  # See below for an explanation about the use the of '-Wno-syntax'.
  if $ACLOCAL -Wno-syntax -I m4 --install && test -f m4/gettext.m4; then
    : # Gettext macros already accessible by default.
  else
    echo "skip_all_ \"couldn't find or get gettext macros\"" >> get.sh
  fi
fi

cat >> get.sh <<'END'
# Even recent versions of gettext used the now-obsolete 'AM_PROG_MKDIR_P'
# m4 macro. So we need the following to avoid spurious errors.
ACLOCAL="$ACLOCAL -Wno-obsolete"
AUTOMAKE="$AUTOMAKE -Wno-obsolete"
END

# Sanity-check that aclocal works with the obtained macros; if not, relax
# syntax warnings for consumers of get.sh.
. ./get.sh
$ACLOCAL --force -I m4 || cat >> get.sh <<'END'
# We need to use '-Wno-syntax', since we do not want our test suite
# to fail merely because some third-party '.m4' file is underquoted.
ACLOCAL="$ACLOCAL -Wno-syntax"
END

# Remove any Makefile.in possibly created by gettextize/autopoint, to
# avoid spurious maintainer-check failures.
rm -f $(find . -name Makefile.in)

# The file gettextize or autopoint might have copied in the 'm4'
# subdirectory of the test directory are going to be needed by
# other tests, so we must not remove the test directory.
keep_testdirs=yes
:
|
package moze_intel.projecte.api.mapper.generator;

import java.util.Map;

/**
 * Defines something that can simply yield a mapping of values.
 *
 * @param <T> The key type
 * @param <V> The value type
 */
public interface IValueGenerator<T, V extends Comparable<V>> {

	/**
	 * Generate values for a mapper.
	 *
	 * @return a map from each key to its generated value
	 */
	Map<T, V> generateValues();
}
|
/*
Copyright 2018 The OpenEBS Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package startcontroller
import (
"fmt"
"os"
"strconv"
"sync"
"time"
"github.com/golang/glog"
kubeinformers "k8s.io/client-go/informers"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"github.com/openebs/maya/cmd/cstor-volume-mgmt/controller/common"
volumecontroller "github.com/openebs/maya/cmd/cstor-volume-mgmt/controller/volume-controller"
"github.com/openebs/maya/cmd/cstor-volume-mgmt/volume"
clientset "github.com/openebs/maya/pkg/client/generated/clientset/versioned"
informers "github.com/openebs/maya/pkg/client/generated/informers/externalversions"
"github.com/openebs/maya/pkg/signals"
"github.com/openebs/maya/pkg/util"
)
const (
	// NumThreads defines number of worker threads for resource watcher.
	NumThreads = 1
	// NumRoutinesThatFollow is the number of goroutines registered on the
	// sync.WaitGroup in StartControllers below.
	NumRoutinesThatFollow = 1
)
// StartControllers instantiates CStorVolume controllers
// and watches them. It blocks until the controller goroutine exits
// (i.e. until the shutdown signal closes stopCh).
func StartControllers(kubeconfig string) {
	// Set up signals to handle the first shutdown signal gracefully.
	stopCh := signals.SetupSignalHandler()

	cfg, err := getClusterConfig(kubeconfig)
	if err != nil {
		glog.Fatalf(err.Error())
	}

	// Build both the standard Kubernetes client and the OpenEBS CRD client
	// from the same REST config.
	kubeClient, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		glog.Fatalf("Error building kubernetes clientset: %s", err.Error())
	}

	openebsClient, err := clientset.NewForConfig(cfg)
	if err != nil {
		glog.Fatalf("Error building openebs clientset: %s", err.Error())
	}

	volume.FileOperatorVar = util.RealFileOperator{}
	volume.UnixSockVar = util.RealUnixSock{}

	// Blocking call for checking status of istgt running in cstor-volume container.
	util.CheckForIscsi(volume.UnixSockVar)
	// Blocking call for checking status of CStorVolume CR.
	common.CheckForCStorVolumeCRD(openebsClient)

	// NewInformer returns a cache.Store and a controller for populating the store
	// while also providing event notifications. It’s basically a controller with some
	// boilerplate code to sync events from the FIFO queue to the downstream store.
	kubeInformerFactory := kubeinformers.NewSharedInformerFactory(kubeClient, getSyncInterval())
	openebsInformerFactory := informers.NewSharedInformerFactory(openebsClient, getSyncInterval())

	cStorVolumeController := volumecontroller.NewCStorVolumeController(kubeClient, openebsClient, kubeInformerFactory,
		openebsInformerFactory)

	go kubeInformerFactory.Start(stopCh)
	go openebsInformerFactory.Start(stopCh)

	// Waitgroup for starting volume controller goroutines.
	var wg sync.WaitGroup
	wg.Add(NumRoutinesThatFollow)

	// Run controller for cStorVolume; Run blocks until stopCh is closed.
	go func() {
		if err = cStorVolumeController.Run(NumThreads, stopCh); err != nil {
			glog.Fatalf("Error running CStorVolume controller: %s", err.Error())
		}
		wg.Done()
	}()
	wg.Wait()
}
// getClusterConfig returns the Kubernetes REST config, preferring the
// in-cluster service-account config and falling back to the supplied
// kubeconfig path when running outside a cluster.
// FIX: removed the dead masterURL variable (declared empty, never set) and
// lowercased the wrapped error message per Go convention.
func getClusterConfig(kubeconfig string) (*rest.Config, error) {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		glog.Errorf("Failed to get k8s Incluster config. %+v", err)
		if len(kubeconfig) == 0 {
			return nil, fmt.Errorf("kubeconfig is empty: %v", err.Error())
		}
		// No master URL override; rely entirely on the kubeconfig contents.
		cfg, err = clientcmd.BuildConfigFromFlags("", kubeconfig)
		if err != nil {
			return nil, fmt.Errorf("error building kubeconfig: %s", err.Error())
		}
	}
	return cfg, err
}
// getSyncInterval reads the informer resync interval (in seconds) from the
// RESYNC_INTERVAL environment variable. Missing, malformed, or non-positive
// values fall back to common.DefaultSharedInformerInterval.
// FIX: reject non-positive intervals (a negative Duration is meaningless
// here) and use %d/%v format verbs — the original used %q on an int, which
// prints a quoted character literal rather than the number.
func getSyncInterval() time.Duration {
	resyncInterval, err := strconv.Atoi(os.Getenv("RESYNC_INTERVAL"))
	if err != nil || resyncInterval <= 0 {
		glog.Warningf("Incorrect resync interval %d obtained from env, defaulting to %v", resyncInterval, common.DefaultSharedInformerInterval)
		return common.DefaultSharedInformerInterval
	}
	return time.Duration(resyncInterval) * time.Second
}
|
<filename>src/offer/offer.service.ts
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { DeleteResult, Repository } from 'typeorm';
import { Offer } from './../offer/offer.entity';
import { paginate, PaginateOptions } from './../pagination/paginator';
import { offerSearchQuery } from './offer.dto';
/**
 * Service encapsulating all Offer query-building, search, and deletion.
 */
@Injectable()
export class OfferService {
  constructor(
    @InjectRepository(Offer)
    private readonly offerRepository: Repository<Offer>,
  ) {}

  /** Base query: every offer, newest id first. */
  private getOffersBaseQuery() {
    return this.offerRepository.createQueryBuilder('o').orderBy('o.id', 'DESC');
  }

  /** Base query augmented with applicant and participant counts. */
  public getOffersWithCountOfParticipantsAndApplicants() {
    return this.getOffersBaseQuery()
      .loadRelationCountAndMap('o.applicantCount', 'o.applicants')
      .loadRelationCountAndMap('o.participantCount', 'o.participants');
  }

  /**
   * Build a query filtered by the optional title substring
   * (case-insensitive), skill id, and owner id.
   * FIX: `base` is never reassigned (andWhere mutates the builder in place),
   * so it is declared const instead of let.
   */
  public searchOffers(search: offerSearchQuery) {
    const { title, skillId, ownerId } = search;
    const base = this.getOffersWithCountOfParticipantsAndApplicants();
    if (title)
      base.andWhere('LOWER(o.title) like LOWER(:title)', {
        title: `%${title}%`,
      });
    if (skillId) base.andWhere('o.skillId = :skillId', { skillId });
    if (ownerId) base.andWhere('o.ownerId = :ownerId', { ownerId });
    return base;
  }

  /** Paginated variant of searchOffers. */
  public async getFilteredOffersPaginated(
    paginateOptions: PaginateOptions,
    search: offerSearchQuery,
  ) {
    return await paginate(this.searchOffers(search), paginateOptions);
  }

  /**
   * Fetch one offer by id with its skill, owner, participants and
   * applicants (only the listed columns are selected).
   */
  public async getSingleOffer(id: number) {
    return await this.getOffersWithCountOfParticipantsAndApplicants()
      .andWhere('o.id = :id', {
        id,
      })
      .leftJoin('o.skill', 'skill')
      .leftJoin('o.owner', 'owner')
      .leftJoin('o.participants', 'participants')
      .leftJoin('o.applicants', 'applicants')
      .select([
        'o',
        'skill.id',
        'skill.name',
        'owner.username',
        'owner.email',
        'owner.id',
        'participants.id',
        'participants.username',
        'applicants.id',
        'applicants.username',
      ])
      .getOne();
  }

  /** List every offer of the given owner with its applicants. */
  public async getApplicantsByOwner(ownerId: number) {
    return await this.searchOffers({ ownerId: ownerId })
      .leftJoin('o.applicants', 'applicants')
      .select([
        'o.id',
        'o.title',
        'o.description',
        'applicants.id',
        'applicants.username',
      ])
      .getMany();
  }

  /** Delete the offer with the given id; returns the raw DeleteResult. */
  public async deleteOffer(id: number): Promise<DeleteResult> {
    return await this.offerRepository
      .createQueryBuilder('o')
      .delete()
      .where('id = :id', { id })
      .execute();
  }
}
|
def print_even(n):
    """Print every even integer from 0 through n (inclusive), one per line."""
    for value in range(0, n + 1, 2):
        print(value)
|
<gh_stars>1-10
import { Module } from '@nestjs/common';
import { GraphQLModule } from '@nestjs/graphql';
import { CookieSerializer } from '@ultimatebackend/common';
import { CoreModule, RolesRpcClientService, ServiceRegistryModule } from '@ultimatebackend/core';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { RolesModule } from './roles/roles.module';
import { AccountsModule } from './accounts/accounts.module';
import { TenantsModule } from './tenants/tenants.module';
import { WebhooksModule } from './webhooks/webhooks.module';
import { BillingsModule } from './billings/billings.module';
import { CardsModule } from './cards/cards.module';
import { PlansModule } from './plans/plans.module';
import { NotificationsModule } from './notifications/notifications.module';
import { UsersModule } from './users/users.module';
import { TenantMembersModule } from './tenant-members/tenant-members.module';
import { GqlConfigService } from './gql-config.service';
import { AccessTokenModule } from './access-token/access-token.module';
import { GlobalClientModule } from './common/global-client.module';
// Root module of the GraphQL gateway: wires service discovery, the
// asynchronously-configured GraphQL endpoint, and every feature module
// behind a single Nest application.
@Module({
  imports: [
    ServiceRegistryModule,
    // GraphQL server settings are produced at runtime by GqlConfigService.
    GraphQLModule.forRootAsync({
      useClass: GqlConfigService,
    }),
    GlobalClientModule,
    CoreModule,
    RolesModule,
    AccountsModule,
    TenantsModule,
    WebhooksModule,
    BillingsModule,
    CardsModule,
    PlansModule,
    NotificationsModule,
    UsersModule,
    TenantMembersModule,
    AccessTokenModule,
  ],
  controllers: [AppController],
  providers: [AppService, CookieSerializer, RolesRpcClientService],
})
export class AppModule {}
|
class Room:
    """A location in the game world, holding items, obstacles, and exits."""

    def __init__(self, name, description, items=None, obstacles=None, exits=None):
        self.name = name
        self.description = description
        # `or` keeps the caller's non-empty containers and substitutes a
        # fresh one when None (or empty) is passed.
        self.items = items or []
        self.obstacles = obstacles or {}
        self.exits = exits or {}

    def add_item(self, item):
        """Place an item in the room."""
        self.items.append(item)

    def remove_item(self, item):
        """Remove an item if present; unknown items are ignored."""
        try:
            self.items.remove(item)
        except ValueError:
            pass

    def add_obstacle(self, direction, obstacle):
        """Block the given direction with an obstacle."""
        self.obstacles[direction] = obstacle

    def remove_obstacle(self, direction):
        """Clear the obstacle in the given direction, if any."""
        self.obstacles.pop(direction, None)

    def add_exit(self, direction, room):
        """Connect this room to another room in the given direction."""
        self.exits[direction] = room

    def get_exit(self, direction):
        """Return the room behind the exit, or None if there is no exit."""
        return self.exits.get(direction)
class Player:
    """The adventurer: tracks the current room and carried items."""

    def __init__(self, name, current_room):
        self.name = name
        self.current_room = current_room
        self.inventory = []

    def move(self, direction):
        """Walk through an exit, if one exists in that direction."""
        destination = self.current_room.get_exit(direction)
        if destination is None:
            print("You cannot go that way.")
            return
        self.current_room = destination
        print("You have moved to", destination.name)
        print(destination.description)

    def take_item(self, item_name):
        """Pick up a named item from the current room, if it is there."""
        if item_name not in self.current_room.items:
            print("There is no", item_name, "here.")
            return
        self.inventory.append(item_name)
        self.current_room.remove_item(item_name)
        print("You have taken", item_name)

    def interact_with_obstacle(self, direction):
        """Interact with the obstacle blocking the given direction, if any."""
        obstacle = self.current_room.obstacles.get(direction)
        if obstacle is None:
            print("There is no obstacle in that direction.")
        else:
            print("You interact with the", obstacle)
            # Add obstacle interaction logic here
# Create rooms
starting_room = Room("Starting Room", "You are in a dimly lit room. There is a door to the north.", items=["key"])
next_room = Room("Next Room", "You have entered a bright room with a window to the east.", obstacles={"east": "locked door"})
final_room = Room("Final Room", "Congratulations! You have reached the final room.")

# Connect rooms: start <-> next (north/south), next -> final (east)
starting_room.add_exit("north", next_room)
next_room.add_exit("south", starting_room)
next_room.add_exit("east", final_room)

# Create player
player = Player("Player1", starting_room)

# Game loop: read commands until the player reaches the final room.
while player.current_room != final_room:
    command = input("Enter your command: ").lower()
    if command in ["north", "south", "east", "west"]:
        player.move(command)
    elif command.startswith("take"):
        # "take <item>": everything after the first space is the item name
        item_name = command.split(" ", 1)[1]
        player.take_item(item_name)
    elif command.startswith("interact"):
        # "interact <direction>"
        direction = command.split(" ", 1)[1]
        player.interact_with_obstacle(direction)
    else:
        print("Invalid command. Try again.")

print("You have won the game!")
|
<reponame>lgoldstein/communitychest
package net.community.chest.awt.layout.gridbag;
import net.community.chest.lang.StringUtil;
import net.community.chest.reflect.NumberValueStringConstructor;
/**
* <P>Copyright 2007 as per GPLv2</P>
*
* @author <NAME>.
* @since Dec 4, 2007 2:09:48 PM
*/
// Converts between GridBag grid-sizing integers and their string form,
// recognizing the special symbolic sizing values (e.g. RELATIVE/REMAINDER)
// before falling back to plain integer parsing/formatting.
public class GridBagSizingValueStringInstantiator extends NumberValueStringConstructor<Integer> {
	public GridBagSizingValueStringInstantiator ()
	{
		super(Integer.TYPE, Integer.class);
	}
	/*
	 * @see net.community.chest.reflect.ValueStringConstructor#convertInstance(java.lang.Object)
	 */
	@Override
	public String convertInstance (final Integer inst) throws Exception
	{
		if (null == inst)
			return null;

		// Prefer the symbolic name when the value maps to a special sizing type.
		final GridBagGridSizingType	szType=GridBagGridSizingType.fromSpecValue(inst.intValue());
		if (szType != null)
			return szType.toString();

		return super.convertInstance(inst);
	}
	/*
	 * @see net.community.chest.reflect.ValueStringConstructor#newInstance(java.lang.String)
	 */
	@Override
	public Integer newInstance (final String v) throws Exception
	{
		final String	s=StringUtil.getCleanStringValue(v);
		final GridBagGridSizingType	szType=GridBagGridSizingType.fromString(s);
		if (szType != null)	// check if one of the special sizing types
			return Integer.valueOf(szType.getSpecValue());

		// Not a symbolic name: parse as a plain integer.
		return super.newInstance(s);
	}

	// Shared stateless instance.
	public static final GridBagSizingValueStringInstantiator	DEFAULT=new GridBagSizingValueStringInstantiator();
}
|
def increment_list(arr):
    """Add 1 to every element of ``arr`` in place and return the same list."""
    for index, value in enumerate(arr):
        arr[index] = value + 1
    return arr
|
# Copyright (c) 2017, 2019, Oracle Corporation and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
set -o pipefail
#
# Purpose:
# Defines various shared utility functions, including a trace function.
#
# Load this file via the following pattern:
# SCRIPTPATH="$( cd "$(dirname "$0")" > /dev/null 2>&1 ; pwd -P )"
# source ${SCRIPTPATH}/utils.sh
# [ $? -ne 0 ] && echo "[SEVERE] Missing file ${SCRIPTPATH}/utils.sh" && exit 1
#
# exportInstallHomes
#   purpose: export MW_HOME, WL_HOME, ORACLE_HOME
#   with defaults as needed
#   Resolution order: ORACLE_HOME <- MW_HOME <- parent of WL_HOME
#   <- hard-coded /u01/oracle; then WL_HOME and MW_HOME are derived
#   from the resolved ORACLE_HOME.
function exportInstallHomes() {
  export ORACLE_HOME=${ORACLE_HOME:-${MW_HOME}}
  if [ -z ${ORACLE_HOME} ]; then
    if [ -z ${WL_HOME} ]; then
      # Neither home set: fall back to the standard image location.
      export ORACLE_HOME='/u01/oracle'
    else
      # Derive ORACLE_HOME as the parent directory of WL_HOME.
      export ORACLE_HOME="`dirname ${WL_HOME}`"
    fi
  fi
  export WL_HOME=${WL_HOME:-${ORACLE_HOME}/wlserver}
  export MW_HOME=${MW_HOME:-${ORACLE_HOME}}
}
# timestamp
#   purpose: echo timestamp in the form yyyy-mm-ddThh:mm:ss.mmm ZZZ
#   example: 2018-10-01T14:00:00.001 UTC
function timestamp() {
  # date output fields: 1=ymdThms 2=nanoseconds 3=secs-since-epoch 4=timezone
  local timestamp="`date --utc '+%Y-%m-%dT%H:%M:%S %N %s %Z' 2>&1`"
  if [ ! "${timestamp/illegal/xyz}" = "${timestamp}" ]; then
    # old shell versions don't support %N or --utc
    timestamp="`date -u '+%Y-%m-%dT%H:%M:%S 000000 %s %Z' 2>&1`"
  fi
  local ymdhms="`echo $timestamp | awk '{ print $1 }'`"
  # convert nano to milli by keeping the first three digits
  local milli="`echo $timestamp | awk '{ print $2 }' | sed 's/\(^...\).*/\1/'`"
  local timezone="`echo $timestamp | awk '{ print $4 }'`"
  # (removed dead locals 'secs_since_epoch'/'millis_since_opoch' [sic] -
  #  they were computed but never used)
  echo "${ymdhms}.${milli} ${timezone}"
}
#
# trace [-cloc caller-location] -n [log-level] [text]*
# trace [-cloc caller-location] -pipe [log-level] [text]*
# trace [-cloc caller-location] [log-level] [text]*
#
# Generate logging in a format similar to WLST utils.py using the
# same timestamp format as the Operator, and using the same
# log levels as the Operator. This logging is may be parsed by the
# Operator when it reads in a job or pod log.
#
# log-level can be one of SEVERE|ERROR|WARNING|INFO|CONFIG|FINE|FINER|FINEST
# - Default is 'FINE'.
# - NOTE: Use SEVERE, ERROR, WARNING, INFO sparingly since these log-levels
# are visible by default in the Operator log and the Operator captures
# some script logs and echos them to the Operator log.
# - if it's ERROR it's converted to SEVERE
# - if there's no log-level and the text starts with a
# recognized keyword like 'Error:' then the log-level is inferred
# - if there's no log-level and the text does not start with
# a recognized keyword, the log-level is assumed to be 'FINE' (the default)
#
# -n Suppress new-line.
#
# -pipe Redirect stdout through a trace, see example below.
#
# -cloc Use the supplied value as the caller location
#
# examples:
# trace "Situation normal."
# @[2018-09-28T18:10:52.417 UTC][myscript.sh:91][FINE] Situation normal.
#
# trace INFO "Situation normal."
# @[2018-09-28T18:10:52.417 UTC][myscript.sh:91][INFO] Situation normal.
#
# trace "Info: Situation normal."
# @[2018-09-28T18:10:52.417 UTC][myscript.sh:91][INFO] Info: Situation normal.
#
# ls 2>&1 | tracePipe FINE "ls output: "
# @[2018-09-28T18:10:52.417 UTC][myscript.sh:91][FINE] ls output: file1
# @[2018-09-28T18:10:52.417 UTC][myscript.sh:91][FINE] ls output: file2
#
# Set TRACE_INCLUDE_FILE env var to false to suppress file name and line number.
#
function trace() {
  # Runs in a subshell so 'set +x' cannot leak xtrace changes to the caller.
  (
    set +x
    # Resolve the caller location (file:line) unless TRACE_INCLUDE_FILE=false.
    local logLoc=""
    if [ ${TRACE_INCLUDE_FILE:-true} = "true" ]; then
      if [ "$1" = "-cloc" ]; then
        # caller supplied its own location (used by the tracen/tracePipe wrappers)
        logLoc="$2"
        shift
        shift
      else
        logLoc="`basename $0`:${BASH_LINENO[0]}"
      fi
    else
      if [ "$1" = "-cloc" ]; then
        # location reporting disabled - discard the -cloc argument pair
        shift
        shift
      fi
    fi
    local logMode='-normal'
    case $1 in
      -pipe|-n) logMode=$1; shift; ;;
    esac
    # Support log-levels in operator, if unknown then assume FINE
    #   SEVERE|WARNING|INFO|CONFIG|FINE|FINER|FINEST
    # (The '^^' trick below converts the var to upper case.)
    local logLevel='FINE'
    case ${1^^} in
      SEVERE|WARNING|INFO|CONFIG|FINE|FINER|FINEST)
        # explicit level argument - consume it
        logLevel=$1
        shift
        ;;
      ERROR)
        # ERROR is normalized to SEVERE
        logLevel='SEVERE'
        shift
        ;;
      # The remaining patterns infer the level from a leading keyword in the
      # message text itself (e.g. "Warning: ...") and deliberately do NOT
      # shift, so the keyword stays in the logged message.
      WARNING*)
        logLevel='WARNING'
        ;;
      ERROR*|SEVERE*)
        logLevel='SEVERE'
        ;;
      INFO*)
        logLevel='INFO'
        ;;
      CONFIG*)
        logLevel='CONFIG'
        ;;
      FINEST*)
        logLevel='FINEST'
        ;;
      FINER*)
        logLevel='FINER'
        ;;
      FINE*)
        logLevel='FINE'
        ;;
    esac
    # Prefix format matches the Operator's log parser: @[timestamp][loc][level]
    logPrefix="@[`timestamp`][$logLoc][$logLevel]"
    case $logMode in
      -pipe)
        (
          # IFS='' causes read line to preserve leading spaces
          # -r cause read to treat backslashes as-is, e.g. '\n' --> '\n'
          IFS=''
          while read -r line; do
            echo "$logPrefix" "$@" "$line"
          done
        )
        ;;
      -n)
        # suppress trailing newline
        echo -n "$logPrefix" "$@"
        ;;
      *)
        echo "$logPrefix" "$@"
        ;;
    esac
  )
}
#
# tracen
# purpose: same as "trace -n"
#
function tracen() {
  # Same as "trace -n", but computes the caller location here so the
  # reported file:line refers to tracen's caller rather than this wrapper.
  (
    set +x
    trace -cloc "`basename $0`:${BASH_LINENO[0]}" -n "$@"
  )
}
#
# tracePipe
# purpose: same as "trace -pipe"
#
function tracePipe() {
  # Same as "trace -pipe", but computes the caller location here so the
  # reported file:line refers to tracePipe's caller rather than this wrapper.
  (
    set +x
    trace -cloc "`basename $0`:${BASH_LINENO[0]}" -pipe "$@"
  )
}
#
# checkEnv [-q] envvar1 envvar2 ...
#
# purpose: Check and trace the values of the provided env vars.
# If any env vars don't exist or are empty, return non-zero
# and trace an '[SEVERE]'.
# (Pass '-q' to suppress FINE tracing.)
#
# sample: checkEnv HOST NOTSET1 USER NOTSET2
# @[2018-10-05T22:48:04.368 UTC][FINE] HOST='esscupcakes'
# @[2018-10-05T22:48:04.393 UTC][FINE] USER='friendly'
# @[2018-10-05T22:48:04.415 UTC][SEVERE] The following env vars are missing or empty: NOTSET1 NOTSET2
#
function checkEnv() {
  # Validate that each named env var is set and non-empty. Unless -q is
  # given, each present var is traced at FINE level with its value. All
  # missing vars are collected into a single SEVERE message; returns
  # non-zero when any are missing, zero otherwise.
  local trace_values="true"
  if [ "$1" = "-q" ]; then
    trace_values="false"
    shift
  fi
  local missing=""
  local var_name
  for var_name in "$@"; do
    # stop at the first empty argument (matches historical behavior)
    [ -z "${var_name}" ] && break
    if [ -z "${!var_name}" ]; then
      missing="$missing ${var_name}"
    elif [ "$trace_values" = "true" ]; then
      trace FINE "${var_name}='${!var_name}'"
    fi
  done
  if [ -n "${missing}" ]; then
    trace SEVERE "The following env vars are missing or empty: ${missing}"
    return 1
  fi
  return 0
}
# traceEnv:
# purpose: trace a curated set of env vars
# warning: we purposely avoid dumping all env vars
# (K8S provides env vars with potentially sensitive network information)
#
function traceEnv() {
  # Trace a curated allow-list of env vars. We deliberately avoid dumping
  # the whole environment: K8S injects env vars with potentially sensitive
  # network information.
  local env_var
  trace FINE "Env vars ${*}:"
  for env_var in \
    DOMAIN_UID NAMESPACE SERVER_NAME SERVICE_NAME ADMIN_NAME \
    AS_SERVICE_NAME ADMIN_PORT ADMIN_PORT_SECURE USER_MEM_ARGS \
    JAVA_OPTIONS FAIL_BOOT_ON_SITUATIONAL_CONFIG_ERROR STARTUP_MODE \
    DOMAIN_HOME LOG_HOME SERVER_OUT_IN_POD_LOG DATA_HOME \
    KEEP_DEFAULT_DATA_HOME EXPERIMENTAL_LINK_SERVER_DEFAULT_DATA_DIR \
    JAVA_HOME ORACLE_HOME WL_HOME MW_HOME NODEMGR_HOME \
    INTROSPECT_HOME PATH
  do
    echo "  ${env_var}='${!env_var}'"
  done
}
#
# exportEffectiveDomainHome
# if DOMAIN_HOME='${ORACLE_HOME}/user_projects/domains':
# 1) look for a config.xml in DOMAIN_HOME/config and
# and in DOMAIN_HOME/*/config
# 2) Export DOMAIN_HOME to reflect the actual location
# 3) Trace an Error and return non-zero if not found or more than 1 found
#
function exportEffectiveDomainHome() {
  # Resolve the actual domain home when DOMAIN_HOME is the stock
  # '${ORACLE_HOME}/user_projects/domains' location:
  #   1) look for a config.xml in DOMAIN_HOME/config and DOMAIN_HOME/*/config
  #   2) export DOMAIN_HOME pointing at the single directory that has one
  #   3) trace SEVERE and return non-zero if none, or more than one, found
  local count=0
  local cur_domain_home=""
  local eff_domain_home=""
  local found_configs=""
  exportInstallHomes
  # ${DOMAIN_HOME?} aborts with an error if DOMAIN_HOME is unset
  if [ ! "${DOMAIN_HOME?}" = "${ORACLE_HOME}/user_projects/domains" ]; then
    # DOMAIN_HOME was customized - nothing to resolve
    return 0
  fi
  # candidate homes: DOMAIN_HOME itself plus each direct child directory
  local tfile=$(mktemp /tmp/homes.`basename $0`.XXXXXXXXX)
  rm -f $tfile
  ls -d ${DOMAIN_HOME} ${DOMAIN_HOME}/* > $tfile
  # read via fd 22 (not a pipe) so the loop body runs in the current shell
  # and its variable updates persist
  exec 22<> $tfile
  while read -u 22 cur_domain_home; do
    config_path="${cur_domain_home}/config/config.xml"
    if [ ! -f "${config_path}" ]; then
      continue
    fi
    count=$((count + 1))
    if [ $count -eq 1 ]; then
      eff_domain_home="${cur_domain_home}"
      found_configs="'${config_path}'"
    else
      # remember every match so the error message can list them all
      found_configs="${found_configs}, '${config_path}'"
    fi
  done
  rm -f $tfile
  if [ $count -eq 1 ]; then
    export DOMAIN_HOME="${eff_domain_home}"
    return 0
  fi
  if [ $count -eq 0 ]; then
    trace SEVERE "No config.xml found at DOMAIN_HOME/config/config.xml or DOMAIN_HOME/*/config/config.xml, DOMAIN_HOME='$DOMAIN_HOME'. Check your 'domainHome' setting in your WebLogic Operator Domain resource, and your pv/pvc mount location (if any)."
    return 1
  fi
  # if we get this far, count is > 1
  trace SEVERE "More than one config.xml found at DOMAIN_HOME/config/config.xml and DOMAIN_HOME/*/config/config.xml, DOMAIN_HOME='$DOMAIN_HOME': ${found_configs}. Configure your 'domainHome' setting in your WebLogic Operator Domain resource to reference a single WebLogic domain."
  return 1
}
# versionCmp
#   Compares two wl versions $1 $2, field by field, up to the length of $2.
#   Expects the form N.N.N.N; missing fields in $1 are treated as 0.
#   echo "1"  if v1 > v2
#   echo "-1" if v1 < v2
#   echo "0"  if v1 == v2
versionCmp()
{
  local left right idx
  IFS='.' read -r -a left <<< "$(echo $1)"
  IFS='.' read -r -a right <<< "$(echo $2)"
  for idx in "${!right[@]}"; do
    if [ ${left[idx]:-0} -gt ${right[idx]:-0} ]; then
      echo "1"
      return
    fi
    if [ ${left[idx]:-0} -lt ${right[idx]:-0} ]; then
      echo "-1"
      return
    fi
  done
  echo "0"
}

# versionGE
#   succeed when WL version v1 >= v2
versionGE()
{
  [ "`versionCmp "$1" "$2"`" -ge 0 ]
}

# versionEQ
#   succeed when v1 == v2
versionEQ()
{
  [ "`versionCmp "$1" "$2"`" -eq 0 ]
}
# hasWebLogicPatches
#   check for the given patch numbers in the install inventory,
#   and return 1 if any is not found
# - if we can't find the install inventory then we
#   assume the patch is there...
# - we parse the install inventory as this is far faster than
#   using opatch or weblogic.version
hasWebLogicPatches()
{
  local reg_file=$ORACLE_HOME/inventory/registry.xml
  [ ! -f $reg_file ] && return 0
  local pnum
  for pnum in "$@"; do
    # BUG FIX: this previously grepped "$1" on every iteration, so only
    # the first patch number was ever actually checked.
    grep --silent "patch-id=\"$pnum\"" $reg_file || return 1
  done
}
# getWebLogicVersion
#   parse the wl version from the install inventory
# - when the version cannot be determined, echo a high dummy version
#   ("9999.9999.9999.9999") that is sufficient to pass version checks
# - we parse the install inventory as this is far faster than
#   using opatch or weblogic.version
getWebLogicVersion()
{
  local reg_file=$ORACLE_HOME/inventory/registry.xml
  local fallback="9999.9999.9999.9999"
  if [ ! -f $reg_file ]; then
    echo "$fallback"
    return
  fi
  # The following grep captures both "WebLogic Server" and "WebLogic Server for FMW"
  local wlver
  wlver="$(grep 'name="WebLogic Server.*version=' $reg_file \
           | sed 's/.*version="\([0-9.]*\)".*/\1/g')"
  echo ${wlver:-$fallback}
}
# checkWebLogicVersion
#   check if the WL version is supported by the Operator
# - skip check if SKIP_WL_VERSION_CHECK = "true"
# - log an error if WL version < 12.2.1.3
# - log an error if WL version == 12.2.1.3 && patch 29135930 is missing
# - you can override the required 12.2.1.3 patches by exporting
#   global WL12213REQUIREDPATCHES to an empty string or to other
#   patch number(s)
# - return 1 if logged an error
# - return 0 otherwise
checkWebLogicVersion()
{
  # explicit opt-out of all version checking
  [ "$SKIP_WL_VERSION_CHECK" = "true" ] && return 0
  local cur_wl_ver="`getWebLogicVersion`"
  local exp_wl_ver="12.2.1.3"
  # required 12.2.1.3 patches; overridable via WL12213REQUIREDPATCHES
  local exp_wl_12213_patches="${WL12213REQUIREDPATCHES:-"29135930"}"
  # exactly 12.2.1.3 additionally requires specific patches
  if versionEQ "$cur_wl_ver" "12.2.1.3" ; then
    if ! hasWebLogicPatches $exp_wl_12213_patches ; then
      trace SEVERE "The Operator requires that WebLogic version '12.2.1.3' have patch '$exp_wl_12213_patches'. To bypass this check, set env var SKIP_WL_VERSION_CHECK to 'true'."
      return 1
    fi
  fi
  # "9999..." is the sentinel getWebLogicVersion echoes when it cannot
  # determine a version - assume the install is fine in that case
  if versionEQ "$cur_wl_ver" "9999.9999.9999.9999" ; then
    trace INFO "Could not determine WebLogic version.  Assuming version is fine.  (The Operator requires WebLogic version '${exp_wl_ver}' or higher, and also requires patches '$exp_wl_12213_patches' for version '12.2.1.3'.)."
    return 0
  fi
  if versionGE "$cur_wl_ver" "${exp_wl_ver}" ; then
    trace INFO "WebLogic version='$cur_wl_ver'. Version check passed. (The Operator requires WebLogic version '${exp_wl_ver}' or higher)."
  else
    trace SEVERE "WebLogic version='$cur_wl_ver' and the Operator requires WebLogic version '${exp_wl_ver}' or higher. To bypass this check, set env var SKIP_WL_VERSION_CHECK to 'true'."
    return 1
  fi
  return 0
}
#
# getAdminServerUrl
# purpose: Get the admin URL used to connect to the admin server internally, e.g. when starting a managed server
# sample:
# ADMIN_URL=$(getAdminServerUrl)
#
function getAdminServerUrl() {
  # Compose the internal admin server URL. The protocol depends on two
  # flags: ISTIO_ENABLED switches http/https to t3/t3s, and
  # ADMIN_SERVER_PORT_SECURE selects the secure variant.
  local admin_protocol
  if [ "${ADMIN_SERVER_PORT_SECURE}" = "true" ]; then
    if [ "${ISTIO_ENABLED}" = "true" ]; then
      admin_protocol="t3s"
    else
      admin_protocol="https"
    fi
  else
    if [ "${ISTIO_ENABLED}" = "true" ]; then
      admin_protocol="t3"
    else
      admin_protocol="http"
    fi
  fi
  echo ${admin_protocol}://${AS_SERVICE_NAME}:${ADMIN_PORT}
}
function waitForShutdownMarker() {
  #
  # Block until the shutdown marker file appears, then exit cleanly.
  # Kubernetes keeps monitoring this pod via liveness and readiness probes
  # the whole time we wait.
  #
  trace "Wait indefinitely so that the Kubernetes pod does not exit and try to restart"
  until [ -e ${SHUTDOWN_MARKER_FILE} ]; do
    sleep 3
  done
  exit 0
}
#
# Define helper fn for failure debugging
# If the livenessProbeSuccessOverride file is available, do not exit from startServer.sh.
# This will cause the pod to stay up instead of restart.
# (The liveness probe checks the same file.)
#
function exitOrLoop {
  # Failure hook: when the livenessProbeSuccessOverride debug file exists,
  # keep the pod alive (wait for the shutdown marker) instead of exiting,
  # so the failure can be debugged in place. The liveness probe checks the
  # same file, so the pod is not restarted while we wait.
  if [ -f /weblogic-operator/debug/livenessProbeSuccessOverride ]
  then
    waitForShutdownMarker
  else
    exit 1
  fi
}
#
# Define helper fn to create a folder (mode 750, parents included);
# on failure it logs SEVERE and calls exitOrLoop.
#
function createFolder {
  # Quote "$1" so directory paths containing spaces are handled correctly
  # (previously an unquoted $1 would word-split and create wrong paths).
  mkdir -m 750 -p "$1"
  if [ ! -d "$1" ]; then
    trace SEVERE "Unable to create folder $1"
    exitOrLoop
  fi
}
# Returns, as the function's exit status, the number of regular files
# found anywhere under the specified directory.
function countFilesInDir() {
  dir=${1}
  cnt=$(find ${dir} -type f | wc -l)
  if [ $? -ne 0 ]; then
    trace SEVERE "failed determining number of files in '${dir}'"
    exitOrLoop
  fi
  trace "file count in directory '${dir}': ${cnt}"
  return ${cnt}
}
# Creates (force-replacing if needed) a symbolic link at $2 that points
# to the target directory $1; logs SEVERE and exits/loops on failure.
function createSymbolicLink() {
  targetDir=${1}
  sourceDir=${2}
  if ! /bin/ln -sFf ${targetDir} ${sourceDir}; then
    trace SEVERE "failed to create symbolic link from '${sourceDir}' to '${targetDir}'"
    exitOrLoop
  fi
  trace "Created symbolic link from '${sourceDir}' to '${targetDir}'"
}
# The following function will attempt to create a symbolic link from the server's default 'data' directory,
# (${DOMAIN_HOME}/servers/${SERVER_NAME}/data) to the centralized data directory specified by the
# 'dataHome' attribute of the CRD ($DATA_HOME/${SERVER_NAME}/data). If both the ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
# and $DATA_HOME/${SERVER_NAME}/data directories contain persistent files that the Operator can't resolve
# than an error message is logged asking the user to manually resolve the files and then exit.
function linkServerDefaultDir() {
  # Case 1: the server's default 'data' directory does not exist yet -
  # simply create the parent and the symbolic link into $DATA_HOME.
  # if server's default 'data' directory (${DOMAIN_HOME}/servers/${SERVER_NAME}/data) does not exist than create
  # symbolic link to location specified by $DATA_HOME/${SERVER_NAME}/data
  if [ ! -d ${DOMAIN_HOME}/servers/${SERVER_NAME}/data ]; then
    trace "'${DOMAIN_HOME}/servers/${SERVER_NAME}/data' does NOT exist as a directory"
    # Create the server's directory in $DOMAIN_HOME/servers
    if [ ! -d ${DOMAIN_HOME}/servers/${SERVER_NAME} ]; then
      trace "Creating directory '${DOMAIN_HOME}/servers/${SERVER_NAME}'"
      createFolder ${DOMAIN_HOME}/servers/${SERVER_NAME}
    else
      trace "'${DOMAIN_HOME}/servers/${SERVER_NAME}' already exists as a directory"
    fi
    # If server's 'data' directory is not already a symbolic link than create the symbolic link to
    # $DATA_HOME/${SERVER_NAME}/data
    if [ ! -L ${DOMAIN_HOME}/servers/${SERVER_NAME}/data ]; then
      createSymbolicLink ${DATA_HOME}/${SERVER_NAME}/data ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
    else
      trace "'${DOMAIN_HOME}/servers/${SERVER_NAME}/data' is already a symbolic link"
    fi
  else
    # Case 2: the default 'data' directory already exists - decide, based
    # on which side holds files, whether we can migrate and link it.
    trace "'${DOMAIN_HOME}/servers/${SERVER_NAME}/data' exists as a directory"
    # server's default 'data' directory (${DOMAIN_HOME}/servers/${SERVER_NAME}/data) exists so first verify it's
    # not a symbolic link.  If it's already a symbolic link than there is nothing to do.
    if [ -L ${DOMAIN_HOME}/servers/${SERVER_NAME}/data ]; then
      trace "'${DOMAIN_HOME}/servers/${SERVER_NAME}/data' is already a symbolic link"
    else
      # Server's default 'data' directory (${DOMAIN_HOME}/servers/${SERVER_NAME}/data) exists and is not
      # a symbolic link so must be a directory.
      # count number of files found under directory ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
      countFilesInDir ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
      fileCountServerDomainHomeDir=$?
      # count number of files found under directory ${DATA_HOME}/${SERVER_NAME}/data
      countFilesInDir ${DATA_HOME}/${SERVER_NAME}/data
      fileCountServerDataDir=$?
      # Use file counts to determine whether or not we can create a symbolic link to centralize
      # data directory in specified ${DATA_HOME}/${SERVER_NAME}/data directory.
      if [ ${fileCountServerDataDir} -eq 0 ]; then
        # DATA_HOME side is empty: safe to copy any DOMAIN_HOME files over,
        # then replace the directory with a link.
        if [ ${fileCountServerDomainHomeDir} -ne 0 ]; then
          cp -rf ${DOMAIN_HOME}/servers/${SERVER_NAME}/data ${DATA_HOME}/${SERVER_NAME}
          [ $? -ne 0 ] && trace SEVERE "failed to copy directory/files from '${DOMAIN_HOME}/servers/${SERVER_NAME}/data' to '${DATA_HOME}/${SERVER_NAME}' directory" && exitOrLoop
          trace "Recursively copied directory/files from '${DOMAIN_HOME}/servers/${SERVER_NAME}/data' to '${DATA_HOME}/${SERVER_NAME}' directory"
        else
          trace "'${DOMAIN_HOME}/servers/${SERVER_NAME}/data' directory is empty"
        fi
        # forcefully delete the server's data directory so we can create symbolic link
        rm -rf ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
        [ $? -ne 0 ] && trace SEVERE "failed to delete '${DOMAIN_HOME}/servers/${SERVER_NAME}/data' directory" && exitOrLoop
        trace "Deleted directory '${DOMAIN_HOME}/servers/${SERVER_NAME}/data'"
        # Create the symbolic link from server's data directory to $DATA_HOME
        createSymbolicLink ${DATA_HOME}/${SERVER_NAME}/data ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
      elif [ ${fileCountServerDataDir} -ne 0 ]; then
        # DATA_HOME side has files: it wins only if the DOMAIN_HOME side is
        # empty; otherwise the conflict must be resolved manually.
        if [ ${fileCountServerDomainHomeDir} -ne 0 ]; then
          trace SEVERE "The directory located in DOMAIN_HOME at '${DOMAIN_HOME}/servers/${SERVER_NAME}/data' and the directory located in the domain resource dataHome directory at '${DATA_HOME}/${SERVER_NAME}/data' both contain persistent files and the Operator cannot resolve which directory to use. You must manually move any persistent files from the '${DOMAIN_HOME}/servers/${SERVER_NAME}/data' directory to '${DATA_HOME}/${SERVER_NAME}/data', or remove them, and then delete the '${DOMAIN_HOME}/servers/${SERVER_NAME}/data' directory. Once this is done you can then restart the Domain. Alternatively, you can avoid this validation by setting the 'KEEP_DEFAULT_DATA_HOME' environment variable, in which case WebLogic custom and default stores will use the dataHome location (ignoring any files in the DOMAIN_HOME location), and other services will use the potentially ephemeral DOMAIN_HOME location for their files."
          exitOrLoop
        else
          # forcefully delete the server's data directory so we can create symbolic link
          rm -rf ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
          [ $? -ne 0 ] && trace SEVERE "failed to delete '${DOMAIN_HOME}/servers/${SERVER_NAME}/data' directory" && exitOrLoop
          trace "Deleted directory '${DOMAIN_HOME}/servers/${SERVER_NAME}/data'"
          # Create the symbolic link from server's data directory to $DATA_HOME
          createSymbolicLink ${DATA_HOME}/${SERVER_NAME}/data ${DOMAIN_HOME}/servers/${SERVER_NAME}/data
        fi
      fi
    fi
  fi
}
#
# adjustPath
#   purpose: Prepend $PATH with $JAVA_HOME/bin if $JAVA_HOME is set
#            and if $JAVA_HOME/bin is not already in $PATH
#
function adjustPath() {
  # Quote JAVA_HOME in the test: previously an unquoted ${JAVA_HOME}
  # made '[ ! -z ... ]' fail for paths containing spaces.
  if [ -n "${JAVA_HOME}" ]; then
    if [[ ":$PATH:" != *":${JAVA_HOME}/bin:"* ]]; then
      export PATH="${JAVA_HOME}/bin:$PATH"
    fi
  fi
}
|
#!/bin/bash -e
# Run inside https://hub.docker.com/r/devexpress/devextreme-build/
# On TERM/INT, kill the whole process group so background helpers
# (Xvfb, browsers, the .NET test runner) do not outlive this script.
trap "echo 'Interrupted!' && kill -9 0" TERM INT
export DEVEXTREME_DOCKER_CI=true
# Keep NuGet packages inside the workspace so run_test can chmod them later.
export NUGET_PACKAGES=$PWD/dotnet_packages
function run_lint {
    # Install the lint toolchain on demand, then run the repo's lint scripts.
    npm i eslint eslint-plugin-spellcheck stylelint stylelint-config-standard npm-run-all
    npm run lint
}
function run_ts {
    # Verify the checked-in TypeScript declaration file is up to date:
    # regenerate it, diff against the committed copy, then run the
    # TS compilation/consistency gulp checks.
    target=./ts/dx.all.d.ts
    cp $target $target.current

    npm i
    npm run update-ts

    if ! diff $target.current $target -U 5 > $target.diff; then
        echo "FAIL: $target is outdated:"
        # strip the two-line unified-diff header before printing
        cat $target.diff | sed "1,2d"
        exit 1
    else
        echo "TS is up-to-date"
    fi

    npx gulp ts-compilation-check ts-jquery-check npm-ts-modules-check
}
function run_test {
    # Run the QUnit suite: build the project, start the .NET test runner,
    # then drive a real browser (Chrome by default, Firefox via $BROWSER)
    # at the runner's URL. HEADLESS=true skips Xvfb/VNC and uses headless
    # browser modes. The function's exit status mirrors the runner's.
    export DEVEXTREME_QUNIT_CI=true

    local port=`node -e "console.log(require('./ports.json').qunit)"`
    local url="http://localhost:$port/run?notimers=true&nojquery=true"
    local runner_pid
    local runner_result=0

    # restrict the run to one test constellation when $CONSTEL is set
    [ -n "$CONSTEL" ] && url="$url&constellation=$CONSTEL"

    if [ "$HEADLESS" != "true" ]; then
        # virtual display plus VNC access for non-headless browser runs
        Xvfb :99 -ac -screen 0 1200x600x24 &
        x11vnc -display :99 2>/dev/null &
    fi

    npm i
    npm run build

    # See https://github.com/DevExpress/DevExtreme/pull/1251
    chmod 755 $(find dotnet_packages -type d)

    dotnet ./testing/runner/bin/runner.dll --single-run & runner_pid=$!

    # wait until the runner starts accepting connections
    while ! httping -qc1 $url; do
        sleep 1
    done

    case "$BROWSER" in
        "firefox")
            local firefox_args="-profile /firefox-profile $url"
            [ "$HEADLESS" == "true" ] && firefox_args="-headless $firefox_args"
            firefox --version
            firefox $firefox_args &
        ;;
        *)
            google-chrome-stable --version
            if [ "$HEADLESS" == "true" ]; then
                google-chrome-stable \
                    --no-sandbox \
                    --disable-gpu \
                    --user-data-dir=/tmp/chrome \
                    --headless \
                    --remote-debugging-address=0.0.0.0 \
                    --remote-debugging-port=9222 \
                    $url &>headless-chrome.log &
            else
                dbus-launch --exit-with-session google-chrome-stable \
                    --no-sandbox \
                    --disable-gpu \
                    --user-data-dir=/tmp/chrome \
                    --no-first-run \
                    --no-default-browser-check \
                    --disable-translate \
                    $url &
            fi
        ;;
    esac

    # block until the runner finishes; propagate its pass/fail status
    wait $runner_pid || runner_result=1
    exit $runner_result
}
function run_test_themebuilder {
    # Build the themes and themebuilder assets, then run the themebuilder's
    # own test suite from within its subdirectory.
    dotnet build build/build-dotnet.sln
    npm i
    npm run build-themes
    npm run build-themebuilder-assets
    cd themebuilder
    npm i
    npm run test
}
function run_test_functional {
    # Build the project and run the functional test suite headlessly.
    npm run build
    npm run test-functional -- --browsers chrome:headless
}
# Log tool versions for CI diagnostics, then dispatch on the $TARGET env var.
echo "node $(node -v), npm $(npm -v), dotnet $(dotnet --version)"

case "$TARGET" in
    "lint") run_lint ;;
    "ts") run_ts ;;
    "test") run_test ;;
    "test_themebuilder") run_test_themebuilder ;;
    "test_functional") run_test_functional ;;
    *)
        echo "Unknown target"
        exit 1
    ;;
esac
|
package me.insidezhou.southernquiet.job.driver;
import me.insidezhou.southernquiet.Constants;
import me.insidezhou.southernquiet.amqp.rabbit.*;
import me.insidezhou.southernquiet.job.AmqpJobAutoConfiguration;
import me.insidezhou.southernquiet.job.JobProcessor;
import me.insidezhou.southernquiet.logging.SouthernQuietLogger;
import me.insidezhou.southernquiet.logging.SouthernQuietLoggerFactory;
import me.insidezhou.southernquiet.util.Amplifier;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.*;
import org.springframework.amqp.rabbit.annotation.RabbitListenerConfigurer;
import org.springframework.amqp.rabbit.config.DirectRabbitListenerContainerFactory;
import org.springframework.amqp.rabbit.config.SimpleRabbitListenerEndpoint;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistrar;
import org.springframework.amqp.rabbit.transaction.RabbitTransactionManager;
import org.springframework.amqp.support.converter.SmartMessageConverter;
import org.springframework.boot.autoconfigure.amqp.RabbitProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.Lifecycle;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.stream.Collectors;
public class AmqpJobProcessorManager extends AbstractJobProcessorManager implements Lifecycle, RabbitListenerConfigurer {
private final static SouthernQuietLogger log = SouthernQuietLoggerFactory.getLogger(AmqpJobProcessorManager.class);
private final SmartMessageConverter messageConverter;
private final ConnectionFactory connectionFactory;
private final List<ProcessorEndpoint> processorEndpoints = new ArrayList<>();
private final AmqpAutoConfiguration.Properties amqpProperties;
private final AmqpJobAutoConfiguration.Properties amqpJobProperties;
private final Amplifier amplifier;
private final RabbitProperties rabbitProperties;
private final AmqpAdmin amqpAdmin;
private final RabbitTemplate rabbitTemplate;
public AmqpJobProcessorManager(AmqpAdmin amqpAdmin,
AmqpJobArranger<?> jobArranger,
Amplifier amplifier,
AmqpJobAutoConfiguration.Properties amqpJobProperties,
AmqpAutoConfiguration.Properties amqpProperties,
RabbitTransactionManager transactionManager,
RabbitProperties rabbitProperties,
ApplicationContext applicationContext
) {
super(applicationContext);
this.amplifier = amplifier;
this.amqpAdmin = amqpAdmin;
this.amqpJobProperties = amqpJobProperties;
this.amqpProperties = amqpProperties;
this.rabbitProperties = rabbitProperties;
this.messageConverter = jobArranger.getMessageConverter();
this.connectionFactory = transactionManager.getConnectionFactory();
this.rabbitTemplate = new RabbitTemplate(connectionFactory);
}
@Override
public void configureRabbitListeners(RabbitListenerEndpointRegistrar registrar) {
processorEndpoints.stream().collect(Collectors.groupingBy(ProcessorEndpoint::getRouting)).forEach((routing, group) -> {
ProcessorEndpoint endpoint = group.get(0);
String processorName = endpoint.getProcessorName();
JobProcessor processorAnnotation = endpoint.getProcessorAnnotation();
Amplifier amplifier = this.amplifier;
if (!StringUtils.isEmpty(processorAnnotation.amplifierBeanName())) {
amplifier = applicationContext.getBean(processorAnnotation.amplifierBeanName(), Amplifier.class);
}
DirectRabbitListenerContainerFactoryConfigurer containerFactoryConfigurer = new DirectRabbitListenerContainerFactoryConfigurer(
rabbitProperties,
new AmqpMessageRecover(
rabbitTemplate,
amplifier,
Constants.AMQP_DEFAULT,
getDeadRouting(amqpJobProperties.getNamePrefix(), processorAnnotation, processorName),
AbstractAmqpJobArranger.getDelayRouting(amqpJobProperties.getNamePrefix(), processorAnnotation.job()),
getRetryRouting(amqpJobProperties.getNamePrefix(), processorAnnotation, processorName),
amqpProperties
),
amqpProperties
);
DirectRabbitListenerContainerFactory factory = new DirectRabbitListenerContainerFactory();
factory.setMessageConverter(messageConverter);
factory.setAcknowledgeMode(amqpProperties.getAcknowledgeMode());
factory.setConsumersPerQueue(processorAnnotation.concurrency());
containerFactoryConfigurer.configure(factory, connectionFactory);
SimpleRabbitListenerEndpoint rabbitListenerEndpoint = new SimpleRabbitListenerEndpoint();
rabbitListenerEndpoint.setId(UUID.randomUUID().toString());
rabbitListenerEndpoint.setQueueNames(endpoint.getRouting());
rabbitListenerEndpoint.setAdmin(amqpAdmin);
rabbitListenerEndpoint.setMessageListener(endpoint.getMessageListener());
registrar.registerEndpoint(rabbitListenerEndpoint, factory);
});
}
@Override
protected void initProcessor(JobProcessor processor, Object bean, Method method) {
String processorName = getProcessorName(processor, method);
String processorRouting = getProcessorRouting(processor, processorName);
processorEndpoints.stream()
.filter(processorEndpoint -> processor.job() == processorEndpoint.getProcessorAnnotation().job() && processorName.equals(processorEndpoint.getProcessorName()))
.findAny()
.ifPresent(processorEndpoint -> log.message("任务处理器重复")
.context(context -> {
context.put("queue", processorRouting);
context.put("processor", bean.getClass().getName());
context.put("processorName", processorName);
context.put("job", processor.job().getSimpleName());
})
);
declareExchangeAndQueue(processor, processorName);
DelayedMessage delayedAnnotation = AnnotatedElementUtils.findMergedAnnotation(processor.job(), DelayedMessage.class);
ProcessorEndpoint processorEndpoint = new ProcessorEndpoint();
processorEndpoint.setProcessorName(processorName);
processorEndpoint.setProcessorAnnotation(processor);
processorEndpoint.setRouting(processorRouting);
processorEndpoint.setMessageListener(generateMessageListener(
ParameterizedTypeReference.forType(processor.job()),
processorRouting,
processor,
bean,
method,
processorName,
delayedAnnotation
));
processorEndpoints.add(processorEndpoint);
}
protected MessageListener generateMessageListener(ParameterizedTypeReference<?> typeReference,
String routing,
JobProcessor processor,
Object bean,
Method method,
String processorName,
DelayedMessage delayedAnnotation
) {
return message -> {
Object job = messageConverter.fromMessage(message, typeReference);
onMessageReceived(routing, bean, processorName, job, message);
Object[] parameters = Arrays.stream(method.getParameters())
.map(parameter -> {
Class<?> parameterClass = parameter.getType();
if (parameterClass.isInstance(job)) {
return job;
}
else if (parameterClass.isInstance(processor)) {
return processor;
}
else if (parameterClass.equals(DelayedMessage.class)) {
return delayedAnnotation;
}
else {
log.message("不支持在任务处理器中使用此类型的参数")
.context("parameter", parameter.getClass())
.context("job", processor.job())
.warn();
try {
return parameterClass.getDeclaredConstructor().newInstance();
}
catch (Exception e) {
return null;
}
}
})
.toArray();
try {
method.invoke(bean, parameters);
}
catch (RuntimeException e) {
log.message("任务处理器抛出异常").exception(e).error();
throw e;
}
catch (InvocationTargetException e) {
Throwable target = e.getTargetException();
log.message("任务处理器抛出异常").exception(target).error();
if (target instanceof RuntimeException) {
throw (RuntimeException) target;
}
throw new RuntimeException(target);
}
catch (Exception e) {
throw new RuntimeException(e);
}
};
}
protected void onMessageReceived(
String routing,
Object bean,
String listenerName,
Object job,
Message message
) {
log.message("接到任务")
.context(context -> {
context.put("queue", routing);
context.put("processor", bean.getClass().getName());
context.put("processorName", listenerName);
context.put("job", job.getClass().getSimpleName());
context.put("message", message);
})
.debug();
}
public static String getDeadRouting(String prefix, JobProcessor processor, String processorName) {
return AbstractAmqpJobArranger.getRouting(
prefix,
suffix("DEAD." + AbstractAmqpJobArranger.getQueueSource(processor.job()), processorName));
}
public static String getRetryRouting(String prefix, JobProcessor processor, String processorName) {
return AbstractAmqpJobArranger.getRouting(
prefix,
suffix("RETRY." + AbstractAmqpJobArranger.getQueueSource(processor.job()), processorName));
}
private String getProcessorRouting(JobProcessor processor, String processorName) {
return suffix(AbstractAmqpJobArranger.getRouting(amqpJobProperties.getNamePrefix(), processor.job()), processorName);
}
private String getProcessorName(JobProcessor processor, Method method) {
String processorName = processor.name();
if (StringUtils.isEmpty(processorName)) {
processorName = method.getName();
}
Assert.hasText(processorName, "处理器的名称不能为空");
return processorName;
}
public static String suffix(String routing, String processorName) {
return routing + "#" + processorName;
}
private void declareExchangeAndQueue(JobProcessor processor, String processorName) {
String routing = AbstractAmqpJobArranger.getRouting(amqpJobProperties.getNamePrefix(), processor.job());
String delayRouting = AbstractAmqpJobArranger.getDelayRouting(amqpJobProperties.getNamePrefix(), processor.job());
String processorRouting = getProcessorRouting(processor, processorName);
Exchange exchange = new FanoutExchange(routing, true, false);
Queue queue = new Queue(processorRouting);
amqpAdmin.declareExchange(exchange);
amqpAdmin.declareQueue(queue);
amqpAdmin.declareBinding(BindingBuilder.bind(queue).to(exchange).with(processorRouting).noargs());
Map<String, Object> deadQueueArgs = new HashMap<>();
deadQueueArgs.put(Constants.AMQP_DLX, Constants.AMQP_DEFAULT);
deadQueueArgs.put(Constants.AMQP_DLK, queue.getName());
Queue deadRouting = new Queue(getDeadRouting(amqpJobProperties.getNamePrefix(), processor, processorName), true, false, false, deadQueueArgs);
amqpAdmin.declareQueue(deadRouting);
Map<String, Object> exchangeArguments = new HashMap<>();
exchangeArguments.put(Constants.AMQP_DELAYED_TYPE, "direct");
Exchange delayExchange = new CustomExchange(
delayRouting,
Constants.AMQP_DELAYED_EXCHANGE,
true,
false,
exchangeArguments
);
amqpAdmin.declareExchange(delayExchange);
Map<String, Object> retryQueueArgs = new HashMap<>();
retryQueueArgs.put(Constants.AMQP_DLX, Constants.AMQP_DEFAULT);
retryQueueArgs.put(Constants.AMQP_DLK, queue.getName());
retryQueueArgs.put(Constants.AMQP_MESSAGE_TTL, 0); //这里的硬编码是为了消息到达队列之后立即转发至相应的工作队列。下同。
Queue retryQueue = new Queue(
getRetryRouting(amqpJobProperties.getNamePrefix(), processor, processorName),
true,
false,
false,
retryQueueArgs
);
amqpAdmin.declareQueue(retryQueue);
amqpAdmin.declareBinding(BindingBuilder.bind(retryQueue).to(delayExchange).with(retryQueue.getName()).noargs());
Map<String, Object> delayQueueArgs = new HashMap<>();
delayQueueArgs.put(Constants.AMQP_DLX, routing);
delayQueueArgs.put(Constants.AMQP_DLK, routing);
delayQueueArgs.put(Constants.AMQP_MESSAGE_TTL, 0);
Queue delayQueue = new Queue(
AbstractAmqpJobArranger.getDelayRouting(amqpJobProperties.getNamePrefix(), processor.job()),
true,
false,
false,
delayQueueArgs
);
amqpAdmin.declareQueue(delayQueue);
amqpAdmin.declareBinding(BindingBuilder.bind(delayQueue).to(delayExchange).with(delayQueue.getName()).noargs());
}
/** Lifecycle hook: delegates startup to the underlying RabbitTemplate. */
@Override
public void start() {
    rabbitTemplate.start();
}
/** Lifecycle hook: delegates shutdown to the underlying RabbitTemplate. */
@Override
public void stop() {
    rabbitTemplate.stop();
}
/** Lifecycle hook: reports the running state of the underlying RabbitTemplate. */
@Override
public boolean isRunning() {
    return rabbitTemplate.isRunning();
}
/**
 * Value holder describing one registered job-processor endpoint: the
 * {@code @JobProcessor} annotation that declared it, its resolved name,
 * the AMQP routing key it consumes from, and the listener that handles
 * its messages. Plain mutable bean; no behavior beyond accessors.
 */
static class ProcessorEndpoint {
    // Annotation instance the endpoint was built from.
    private JobProcessor processorAnnotation;
    // Resolved processor name (used when building queue/routing names).
    private String processorName;
    // AMQP routing key this endpoint listens on.
    private String routing;
    // Listener invoked for each delivered message.
    private MessageListener messageListener;

    public JobProcessor getProcessorAnnotation() {
        return processorAnnotation;
    }

    public void setProcessorAnnotation(JobProcessor processorAnnotation) {
        this.processorAnnotation = processorAnnotation;
    }

    public String getProcessorName() {
        return processorName;
    }

    public void setProcessorName(String processorName) {
        this.processorName = processorName;
    }

    public String getRouting() {
        return routing;
    }

    public void setRouting(String routing) {
        this.routing = routing;
    }

    public MessageListener getMessageListener() {
        return messageListener;
    }

    public void setMessageListener(MessageListener messageListener) {
        this.messageListener = messageListener;
    }
}
}
|
package training.binarysearchtree;
import training.binarytree.TreeNode;
import org.junit.jupiter.api.Test;
import java.util.function.BiFunction;
import static training.binarytree.TreeNode.newTree;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static util.datastructure.BinaryTreeNode.contentEquals;
import static util.datastructure.BinaryTreeNode.isValid;
/**
 * Given the root node reference of a BST and a key, delete the node with the
 * given key from the BST. Return the (possibly updated) root node reference.
 * Can you give an implementation with O(height of tree) time complexity?
 *
 * Example 1:
 * Input: root = [5,3,6,2,4,null,7], key = 3
 * Output: [5,4,6,2,null,null,7]
 * Explanation:
 *      5               5
 *     / \             / \
 *    3   6    =>     4   6
 *   / \   \         /     \
 *  2   4   7       2       7
 *
 * Example 2:
 * Input: root = [5,3,6,2,4,null,7], key = 0
 * Output: [5,3,6,2,4,null,7]
 * Explanation:
 * The tree does not contain a node with value 0.
 *
 * Example 3:
 * Input: root = [], key = 0
 * Output: []
 *
 * Constraints:
 * - The number of nodes is in the range [1, 10**4]
 * - -10**5 <= Node.val <= 10**5
 * - All node values are unique
 * - -10**5 <= key <= 10**5
 */
public class E450_Medium_DeleteNodeInABST {

    // Shared fixture: runs the candidate method against the two LeetCode
    // examples plus the empty-tree and delete-the-only-root edge cases.
    static void test(BiFunction<TreeNode, Integer, TreeNode> method) {
        TreeNode root = method.apply(newTree(5,3,6,2,4,null,7), 3);
        assertTrue(isValid(root));
        assertTrue(contentEquals(root, newTree(5,4,6,2,null,null,7)));

        root = method.apply(newTree(5,3,6,2,4,null,7), 0);
        assertTrue(isValid(root));
        assertTrue(contentEquals(root, newTree(5,3,6,2,4,null,7)));

        assertNull(method.apply(null, 0));
        assertNull(method.apply(newTree(1), 1));
    }

    /**
     * Iterative delete in O(height): locate the node, pick a replacement
     * (its only child, or the minimum of its right subtree when it has two
     * children), then splice the replacement into the parent.
     *
     * LeetCode runtime: 0ms - 100%
     */
    public TreeNode deleteNode(TreeNode root, int key) {
        // Locate the target node p and its parent.
        TreeNode p = root, parent = root;
        while (p != null) {
            if (p.val == key)
                break;
            else if (p.val < key) {
                parent = p;
                p = p.right;
            } else {
                parent = p;
                p = p.left;
            }
        }
        // If the key was found:
        if (p != null) {
            TreeNode replacement;
            // Choose the node that will take p's place.
            if (p.left == null)
                replacement = p.right;
            else if (p.right == null)
                replacement = p.left;
            else {
                // Two children: the replacement is the minimum of the right subtree.
                TreeNode rightMin = p.right, rightMinParent = p.right;
                while (rightMin.left != null) {
                    rightMinParent = rightMin;
                    rightMin = rightMin.left;
                }
                rightMin.left = p.left;
                // If the minimum is not p.right itself, detach it from its
                // parent and let it adopt p's right subtree.
                if (rightMin != rightMinParent) {
                    rightMinParent.left = rightMin.right;
                    rightMin.right = p.right;
                }
                replacement = rightMin;
            }
            // Splice the replacement into p's parent (p == parent means p was the root).
            if (p == parent)
                return replacement;
            else if (p == parent.left)
                parent.left = replacement;
            else
                parent.right = replacement;
        }
        return root;
    }

    @Test
    public void testDeleteNode() {
        test(this::deleteNode);
    }
}
|
package org.museautomation.ui.extend.actions;
import org.slf4j.*;
/**
* @author <NAME> (see LICENSE.txt for license details)
*/
/**
 * Base class for edit actions that can be reversed. Subclasses provide the
 * undo behavior; {@link #execute} records successful actions on the supplied
 * {@link UndoStack} so they can later be undone.
 */
public abstract class UndoableAction extends BaseEditAction
    {
    /**
     * Reverses the effect of a previously executed action.
     * @return true if the undo succeeded
     */
    protected abstract boolean undoImplementation();

    /**
     * Runs the action and, on success, pushes it onto the undo stack.
     * Failures — including exceptions thrown by the implementation — are
     * logged and reported via the return value rather than propagated.
     *
     * @param undo_stack stack to record this action on (may be null; then
     *                   the action runs but is not recorded)
     * @return true if the action executed successfully
     */
    public boolean execute(UndoStack undo_stack)
        {
        _undo_stack = undo_stack;
        boolean success;
        try
            {
            success = executeImplementation();
            if (!success)
                LOG.error("command failed: " + getClass().getSimpleName());
            }
        catch (Exception e)
            {
            LOG.error("command " + getClass().getSimpleName() + " failed with an exception", e);
            success = false;
            }
        if (success && _undo_stack != null)
            _undo_stack.push(this);
        return success;
        }

    /**
     * Undoes the action, logging a failure if the implementation reports one.
     * @return true if the undo succeeded
     */
    public boolean undo()
        {
        boolean success = undoImplementation();
        if (!success)
            LOG.error("Unable to undo a command: " + getClass().getSimpleName());
        return success;
        }

    // Stack this action was recorded on; set by execute().
    protected UndoStack _undo_stack;

    final static Logger LOG = LoggerFactory.getLogger(UndoableAction.class);
    }
|
// ESLint rule tests for "accessible-emoji": emoji must be wrapped in a
// <span> carrying role="img" and an aria-label (or be aria-hidden).
const rule = require("../accessible-emoji");
const makeRuleTester = require("./makeRuleTester");

makeRuleTester("accessible-emoji", rule, {
  valid: [
    "<div />",
    "<span />",
    "<span role='img' aria-label='Panda face'>😰</span>",
    "<span role='img' aria-label='Snowman'>☃</span>",
    "<span aria-hidden>😰</span>",
    "<div aria-hidden><span>😰</span></div>"
  ],
  invalid: [
    "<span>😰</span>",
    "<i role='img' aria-label='Panda face'>😰</i>",
    "<Foo>😰</Foo>",
    "<span>foo 😰 bar</span>"
  ]
});
|
'use strict';

// Mocha/chai test suite for src/util/random.js: exercises the default
// export plus the int/float/letter/array/one/chance helpers.
const assert = require('chai').assert;
const random = require('../../../src/util/random.js');

describe('random.js', function () {
  describe('#', () => {
    it('default arguments', () => {
      let num = random();
      assert.isNumber(num);
      // Default call is expected to produce a float (decimal point present).
      assert.notStrictEqual(String(num).match(/\./), null);
    });

    it('random format', () => {
      let str = `name: ${random.letter().repeat(3)} id: ${String(random.int()).repeat(4)}`;
      assert.match(str, /^name:\s[a-z]{3}\sid:\s[0-9]{4}$/);
    });
  });

  describe('#int', () => {
    it('default arguments', () => {
      let num = random.int();
      assert.isNumber(num);
    });

    it('should be int number', () => {
      let num = random.int(3, 10);
      assert.isNumber(num);
      // No decimal point => integral value.
      assert.strictEqual(String(num).match(/\./), null);
    });

    it('single number', () => {
      let num = random.int(10, 10);
      assert.strictEqual(num, 10);
    });

    it('single negative number', () => {
      let num = random.int(-10, -10);
      assert.strictEqual(num, -10);
    });
  });

  describe('#float', () => {
    it('default arguments', () => {
      let num = random.float();
      assert.isNumber(num);
    });

    it('should be float number', () => {
      let num = random.float(3, 10);
      assert.isNumber(num);
      assert.notStrictEqual(String(num).match(/\./), null);
    });
  });

  describe('#letter', () => {
    it('default arguments', () => {
      let letter = random.letter();
      let code = letter.charCodeAt(0);
      assert.isString(letter);
      // 97..122 is the ASCII range of lowercase a-z.
      assert.isTrue(code >= 97 && code <= 122);
    });

    it('uppercase', () => {
      let letter = random.letter('A', 'B');
      assert.isTrue(letter === 'A' || letter === 'B');
    });

    it('uppercase and lowercase', () => {
      let letter = random.letter('a', 'B');
      assert.isTrue(letter === 'a' || letter === 'b');
    });

    it('should be one letter', () => {
      let letter = random.letter('b', 'b');
      assert.strictEqual(letter, 'b');
    });
  });

  describe('#array', () => {
    it('multi items', () => {
      let num = random.array([1, 2]);
      assert.isTrue(num === 1 || num === 2);
    });

    it('one item', () => {
      let letter = random.array(['a']);
      assert.strictEqual(letter, 'a');
    });
  });

  describe('#one', () => {
    it('multi items', () => {
      let num = random.one(1, 2, 3);
      assert.include([1, 2, 3], num);
    });

    it('one item', () => {
      let letter = random.one('a');
      assert.strictEqual(letter, 'a');
    });
  });

  describe('#chance', () => {
    it('multi items', () => {
      // With 1000 draws every weighted outcome should appear at least once.
      let numList = [];
      for (let i = 0; i < 1000; i++) {
        numList.push(random.chance({
          '0.1': 1,
          '0.7': 3,
          '0.2': 2
        }));
      }
      assert.include(numList, 1);
      assert.include(numList, 2);
      assert.include(numList, 3);
    });

    it('percent', () => {
      let num = random.chance({
        '200%': 1
      });
      assert.strictEqual(num, 1);
    });

    it('value is function', () => {
      let num = random.chance({
        '1': () => {
          return 2;
        }
      });
      assert.strictEqual(num, 2);
    });
  });
});
|
<gh_stars>10-100
package httptest
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestQueryString verifies that query-string matchers (QueryValues / Query)
// select the correct mock responder and that non-matching requests fall
// through to the StatusNoMatch/FailedBody default.
func TestQueryString(t *testing.T) {
	t.Parallel()
	s := httptest.NewServer(MockHandler(
		Match(Get, Path("/api/v1/query"), QueryValues("rawr", "rawr", "rawr2")).Respond(Status(200)),
		Match(Get, Path("/api/v1/query"), QueryValues("rawr", "rawr")).Respond(Status(200)),
		Match(Get, Path("/api/v1/query"), Query("blah", "blah2").Add("rawr", "rawr2")).Respond(Status(201)),
	))
	defer s.Close()

	// Table of request paths and the status/body/headers each should yield.
	testCases := []struct {
		method  string
		path    string
		status  int
		body    []byte
		headers map[string][]string
	}{
		{"GET", "/api/v1/query?rawr=rawr", 200, []byte{}, map[string][]string{}},
		{"GET", "/api/v1/query?rawr=rawr2&rawr=rawr", 200, []byte{}, map[string][]string{}},
		{"GET", "/api/v1/query?rawr=rawrzers", StatusNoMatch, []byte(FailedBody), map[string][]string{}},
		{"GET", "/api/v1/query?blah=blah2", StatusNoMatch, []byte(FailedBody), map[string][]string{}},
		{"GET", "/api/v1/query", StatusNoMatch, []byte(FailedBody), map[string][]string{}},
	}
	for i := range testCases {
		t.Run(fmt.Sprintf("%s %s", testCases[i].method, testCases[i].path), func(t *testing.T) {
			tc := testCases[i]
			client := &http.Client{}
			req, err := http.NewRequest(tc.method, fmt.Sprintf("%s%s", s.URL, tc.path), nil)
			require.NoError(t, err)
			assert.NotNil(t, req)
			res, err := client.Do(req)
			require.NoError(t, err)
			assert.NotNil(t, res)
			body, err := ioutil.ReadAll(res.Body)
			require.NoError(t, err)
			assert.Equal(t, tc.status, res.StatusCode)
			assert.Equal(t, tc.body, body)
			for k, v := range tc.headers {
				assert.ElementsMatch(t, v, res.Header[k])
			}
		})
	}
}
// TestJSONof verifies exact-JSON-body matching: the request body must be
// valid JSON deeply equal to the expected map (extra keys, wrong values or
// malformed JSON must not match).
func TestJSONof(t *testing.T) {
	t.Parallel()
	s := httptest.NewServer(MockHandler(
		Match(Get, Path("/api/v1/exact"), JSONof(t, &map[string]interface{}{
			"test": "kairo",
			"user": "dphan2",
		})).Respond(Status(200)),
	))
	defer s.Close()

	testCases := []struct {
		method  string
		path    string
		reqBody []byte
		status  int
		body    []byte
		headers map[string][]string
	}{
		{"GET", "/api/v1/exact", nil, StatusNoMatch, []byte(FailedBody), map[string][]string{}},
		{"GET", "/api/v1/exact", []byte("{\"test\": \"kairo\", \"user\": \"dphan2\"}"), 200, []byte{}, map[string][]string{}},
		{"GET", "/api/v1/exact", []byte("{\"test\": \"kairo\", \"user\": \"wronguser\"}"), StatusNoMatch, []byte(FailedBody), map[string][]string{}},
		{"GET", "/api/v1/exact", []byte("{\"test\": \"kairo\", \"user\": \"dphan2\", \"id\": 2}"), StatusNoMatch, []byte(FailedBody), map[string][]string{}},
		{"GET", "/api/v1/exact", []byte("{\"test\": \"kairo\", \"user\": \"wronguser\", \"id\": 2}"), StatusNoMatch, []byte(FailedBody), map[string][]string{}},
		{"GET", "/api/v1/exact", []byte("blahblah"), StatusNoMatch, []byte(FailedBody), map[string][]string{}},
	}
	for i := range testCases {
		t.Run(fmt.Sprintf("%s %s %d", testCases[i].method, testCases[i].path, testCases[i].status), func(t *testing.T) {
			tc := testCases[i]
			client := &http.Client{}
			req, err := http.NewRequest(tc.method, fmt.Sprintf("%s%s", s.URL, tc.path), bytes.NewReader(tc.reqBody))
			require.NoError(t, err)
			req.Header.Set("Content-type", "application/json")
			res, err := client.Do(req)
			require.NoError(t, err)
			body, err := ioutil.ReadAll(res.Body)
			require.NoError(t, err)
			assert.Equal(t, tc.status, res.StatusCode)
			assert.Equal(t, tc.body, body)
			for k, v := range tc.headers {
				assert.ElementsMatch(t, v, res.Header[k])
			}
		})
	}
}
// TestHttp covers basic method/path matching, single responders, and
// RespondSeq (sequential responders: each request advances to the next
// responder set — here first a plain-text reply, then a JSON one).
func TestHttp(t *testing.T) {
	t.Parallel()
	s := httptest.NewServer(MockHandler(
		Match(Get, Path("/api/v1/foo")).Respond(Status(200)),
		Match(Post, Path("/api/v1/bar")).Respond(Status(201), Body("hello")),
		Match(Post, Path("/api/v1/brunoTheDog")).RespondSeq(
			Responders{
				Status(200),
				Body("fetch"),
			},
			Responders{
				Status(201),
				JSON(t, map[string]interface{}{"dog": 3}),
			},
		),
	))
	defer s.Close()

	defaultHeaders := map[string][]string{
		"Content-Type": {
			"text/plain; charset=utf-8",
		},
	}

	testCases := []struct {
		method  string
		path    string
		status  int
		body    []byte
		headers map[string][]string
	}{
		{"GET", "/api/v1/foo", 200, []byte{}, map[string][]string{}},
		{"POST", "/api/v1/bar", 201, []byte("hello"), defaultHeaders},
		{"POST", "/api/v1/brunoTheDog", 200, []byte("fetch"), defaultHeaders},
		{"POST", "/api/v1/brunoTheDog", 201, []byte("{\"dog\":3}"), map[string][]string{"Content-Type": {"application/json"}}},
		{"POST", "/api/v1/baz", StatusNoMatch, []byte(FailedBody), map[string][]string{}},
	}
	for i := range testCases {
		t.Run(fmt.Sprintf("%s %s", testCases[i].method, testCases[i].path), func(t *testing.T) {
			tc := testCases[i]
			client := &http.Client{}
			req, err := http.NewRequest(tc.method, fmt.Sprintf("%s%s", s.URL, tc.path), nil)
			require.NoError(t, err)
			assert.NotNil(t, req)
			res, err := client.Do(req)
			require.NoError(t, err)
			assert.NotNil(t, res)
			body, err := ioutil.ReadAll(res.Body)
			require.NoError(t, err)
			assert.Equal(t, tc.status, res.StatusCode)
			assert.Equal(t, tc.body, body)
			for k, v := range tc.headers {
				assert.ElementsMatch(t, v, res.Header[k])
			}
		})
	}
}
|
<filename>src/components/lettuce-button/lettuce-button.tsx<gh_stars>1-10
// eslint-disable-next-line @typescript-eslint/no-unused-vars
import {Component, h, Prop} from '@stencil/core';
@Component({
  tag: 'lettuce-button',
  styleUrl: 'lettuce-button.css',
  shadow: true,
})
export class LettuceButton {
  /** Button label; slotted content is rendered after it. */
  @Prop() text: string;
  /** Visual variant, rendered as a CSS class (default 'primary'). */
  @Prop() type = 'primary';
  /** When true, adds the 'fluid' class (full-width styling). */
  @Prop() fluid = false;
  /** Text size; rendered as a `txt-<size>` class. */
  @Prop() sizeSelection: string | 'sm' | 'md' | 'lg' = 'md';
  /** When true, adds the 'square' class. */
  @Prop() square = false;
  /** When true, adds the 'new-game' class. */
  @Prop() newGame = false;

  // Note: the original declared `render(): void` yet returned JSX, and
  // interpolated `null` directly into the template — when `fluid` was false
  // the class attribute contained the literal string "null". Classes are now
  // assembled from enabled flags only.
  render() {
    const classes = [
      'btn',
      this.square ? 'square' : '',
      this.newGame ? 'new-game' : '',
      this.type,
      this.fluid ? 'fluid' : '',
      `txt-${this.sizeSelection}`,
    ]
      .filter(Boolean)
      .join(' ');
    return (
      <button type="button" class={classes}>
        {this.text}
        <slot />
      </button>
    );
  }
}
|
<filename>src/view/app/Components/ShowKeysCheckBox.tsx
import * as React from "react";
import "../Css/Toolbar.css";
// Props for the "Show Keys" toolbar checkbox.
interface ShowsKeysCheckBoxProps {
  // Current checked state (controlled by the parent).
  currentCheckBoxStatus: boolean;
  // Called with the toggled state when the user clicks the box.
  // NOTE(review): "Staus" is a typo for "Status"; renaming would break
  // existing callers, so it is documented here rather than changed.
  onCheckBoxStausChange: (newStaus: boolean) => void;
}
/**
 * Controlled checkbox that toggles key visibility in the toolbar; state is
 * owned by the parent and flows back through onCheckBoxStausChange.
 */
// eslint-disable-next-line @typescript-eslint/naming-convention
const ShowKeysCheckBox: React.FC<ShowsKeysCheckBoxProps> = (props) => {
  return (
    <div className="nav-item ">
      <div className="form-check btn btn-custom">
        <input
          className="form-check-input shadow-none "
          type="checkbox"
          value=""
          id="flexCheckChecked"
          checked={props.currentCheckBoxStatus}
          onChange={() => {
            // Report the inverted state; the parent re-renders with the new value.
            props.onCheckBoxStausChange(!props.currentCheckBoxStatus);
          }}
        />
        <label className="form-check-label fw-bold" htmlFor="flexCheckChecked">
          Show Keys
        </label>
      </div>
    </div>
  );
};

export default ShowKeysCheckBox;
|
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author <NAME>
* @version 1.2
* @date Sat May 30 13:51:12 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra.bld
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `BldParams` trait defines common values to be used in code generation:
 *  the output directory for generated sources and, per numeric vector kind,
 *  the tuple of (vector class, element type, companion vector, companion base,
 *  printf format, matrix trait, sorting class, zero literal, one literal).
 */
trait BldParams
{
    // Platform-specific path separator, used to build DIR portably.
    val _l = java.io.File.separator

    val DIR = s"src${_l}main${_l}scala${_l}scalation${_l}linalgebra${_l}bld"

    //               VECTOR     BASE       VECTOR2    BASE2     FORMAT  MATRI     SORTING     ZERO   ONE
    val kind = Array (("VectorI", "Int",      "VectorD", "Double", "%d",  "MatriI", "SortingI", "0",   "1"),
                      ("VectorL", "Long",     "VectorD", "Double", "%d",  "MatriL", "SortingL", "0l",  "1l"),
                      ("VectorD", "Double",   "VectorI", "Int",    "%g",  "MatriD", "SortingD", "0.0", "1.0"),
                      ("VectorR", "Rational", "VectorD", "Double", "%s",  "MatriR", "SortingR", "_0",  "_1"),
                      ("VectorC", "Complex",  "VectorD", "Double", "%s",  "MatriC", "SortingC", "_0",  "_1"))

} // BldParams trait
|
#!/usr/bin/env bats
#
# Copyright 2019 HAProxy Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
load '../../libs/auth_curl'
load '../../libs/version'
# bats fixture: runs before each test. The bare [ ... ] lines act as
# assertions — a non-zero exit fails the setup.
setup() {
    # creating frontend and related HTTP request rule
    read -r SC _ < <(auth_curl POST "/v2/services/haproxy/configuration/frontends?force_reload=true&version=$(version)" "@${E2E_DIR}/tests/frontends/post.json")
    [ "${SC}" = 201 ]

    read -r SC _ < <(auth_curl POST "/v2/services/haproxy/configuration/http_request_rules?parent_type=frontend&parent_name=test_frontend&force_reload=true&version=$(version)" "@${E2E_DIR}/tests/http_request_rules/unless.json")
    [ "${SC}" = 201 ]

    # creating backend and related HTTP request rule
    read -r SC _ < <(auth_curl POST "/v2/services/haproxy/configuration/backends?force_reload=true&version=$(version)" "@${E2E_DIR}/tests/backends/post.json")
    [ "${SC}" = 201 ]

    read -r SC _ < <(auth_curl POST "/v2/services/haproxy/configuration/http_request_rules?parent_type=backend&parent_name=test_backend&force_reload=true&version=$(version)" "@${E2E_DIR}/tests/http_request_rules/unless.json")
    [ "${SC}" = 201 ]
}
# bats fixture: runs after each test; removes the frontend/backend created
# in setup (deleting the parent also removes its attached rules).
teardown() {
    read -r SC _ < <(auth_curl DELETE "/v2/services/haproxy/configuration/frontends/test_frontend?force_reload=true&version=$(version)")
    [ "${SC}" = 204 ]

    read -r SC _ < <(auth_curl DELETE "/v2/services/haproxy/configuration/backends/test_backend?force_reload=true&version=$(version)")
    [ "${SC}" = 204 ]
}
# Deleting rule index 0 from the frontend should return 204 No Content.
@test "Delete a HTTP Request Rule from frontend" {
    read -r SC _ < <(auth_curl DELETE "/v2/services/haproxy/configuration/http_request_rules/0?parent_type=frontend&parent_name=test_frontend&force_reload=true&version=$(version)")
    [ "${SC}" = 204 ]
}
# Deleting rule index 0 from the backend should return 204 No Content.
@test "Delete a HTTP Request Rule from backend" {
    read -r SC _ < <(auth_curl DELETE "/v2/services/haproxy/configuration/http_request_rules/0?parent_type=backend&parent_name=test_backend&force_reload=true&version=$(version)")
    [ "${SC}" = 204 ]
}
|
# Driver script: for each line of the input file (column 1 = gene name,
# column 3 = EC numbers), fetch the matching enzyme sequences from UniProt
# into <outputDir>/<gene>.fasta.
geneECsFile=$1

# Optional 2nd argument: output directory (created if missing); default: cwd.
if [ -n "$2" ]; then
    outputDir=$2
    if [ ! -d "$outputDir" ]; then
        mkdir "$outputDir"
    fi
else
    outputDir="./"
fi

# wc -l < file prints only the count (no filename); quoting guards against
# paths containing spaces (the original left every expansion unquoted).
nGenes=$(wc -l < "$geneECsFile")
for ((i = 1; i <= nGenes; i++)); do
    # Normalize the i-th line once, then pull out the two fields.
    line=$(head -n "$i" "$geneECsFile" | tail -n 1 | column -t)
    gene=$(echo "$line" | cut -f 1 -d " ")
    ecs=$(echo "$line" | cut -f 3 -d " ")
    echo "retrieving $gene --> $ecs"
    python getEnzFromUniprot.py -ec "$ecs" -o "$gene".fasta -r true -od "$outputDir"
    # (the original's bare `wait` was a no-op: no background jobs are started)
    rm -f temp.xml
    echo "number of sequences retrieved = $(grep -c '^>' "$outputDir/$gene.fasta")"
done
|
<filename>lib/help/random_bytes.js
const crypto = require('crypto')
const { promisify } = require('util')

// Promise-returning wrapper around crypto.randomFill.
const randomFill = promisify(crypto.randomFill)

/**
 * Generate `bytes` cryptographically secure random bytes.
 * allocUnsafe is safe here because randomFill overwrites the whole buffer.
 *
 * @param {number} bytes - number of bytes to generate
 * @returns {Promise<Buffer>} buffer filled with random data
 */
module.exports = async function randomBytes (bytes) {
  const buf = Buffer.allocUnsafe(bytes)
  return randomFill(buf)
}
|
<filename>src/main/java/io/github/intellij/dlanguage/jps/model/JpsDLanguageSdkType.java
package io.github.intellij.dlanguage.jps.model;
import com.intellij.openapi.util.SystemInfo;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.model.JpsDummyElement;
import org.jetbrains.jps.model.JpsElementFactory;
import org.jetbrains.jps.model.JpsElementTypeWithDefaultProperties;
import org.jetbrains.jps.model.library.sdk.JpsSdkType;
import java.io.File;
/**
* This appears to be dead code. Dlang SDK is registered as DlangSdkType
* @see io.github.intellij.dlanguage.DlangSdkType
* todo: investigate JpsSdkType vs SdkType and ensure we're using the right base classes
*/
public class JpsDLanguageSdkType extends JpsSdkType<JpsDummyElement> implements JpsElementTypeWithDefaultProperties<JpsDummyElement> {

    public static final JpsDLanguageSdkType INSTANCE = new JpsDLanguageSdkType();

    /**
     * Builds the path to a named executable, appending ".exe" on Windows.
     *
     * @param path    directory containing the executable
     * @param command base name of the executable (without extension)
     * @return the platform-specific executable file
     */
    @NotNull
    public static File getExecutable(@NotNull final String path, @NotNull final String command) {
        return new File(path, SystemInfo.isWindows ? command + ".exe" : command);
    }

    /**
     * Locates the DMD compiler under {@code <sdkHome>/bin}.
     *
     * @param sdkHome root directory of the D SDK installation
     * @return the dmd executable file
     */
    @NotNull
    public static File getDmdExecutable(@NotNull final String sdkHome) {
        return getExecutable(new File(sdkHome, "bin").getAbsolutePath(), "dmd");
    }

    /** This SDK type carries no properties; a dummy element is used. */
    @NotNull
    @Override
    public JpsDummyElement createDefaultProperties() {
        return JpsElementFactory.getInstance().createDummyElement();
    }
}
|
#!/bin/bash
# Strip the header line of $1 and prefix every remaining line with $2
# (used to tag each TSV row with its distribution/system/workload).
prepend_lines() {
    tail --lines=+2 "$1" | perl -p -e "s,^,$2,g;"
}

# Emit one merged TSV on stdout: header first, then every run's rows.
# (Quoting the path arguments guards against word splitting; the original
# left them unquoted.)
printf "dist\tsys\tworkload\ttime\ttput\n"
for dist in zipfian uniform ; do
    for sys in hbase kudu ; do
        prepend_lines "$dist-$sys/load-100M.log.tsv" "${dist}\t${sys}\tload\t"
        prepend_lines "$dist-$sys/run-workloada.log.tsv" "${dist}\t${sys}\ta\t"
        prepend_lines "$dist-$sys/run-workloadb.log.tsv" "${dist}\t${sys}\tb\t"
        prepend_lines "$dist-$sys/run-workloadc.log.tsv" "${dist}\t${sys}\tc\t"
        # workload d uses the "latest" distribution regardless of $dist
        prepend_lines "$dist-$sys/run-workloadd.log.tsv" "latest\t${sys}\td\t"
    done
done
|
<gh_stars>1-10
# Adds a database index on content_items.document_type to speed up
# queries filtering by document type. Reversible via the standard
# add_index/remove_index pair.
class AddIndexToDocumentType < ActiveRecord::Migration[5.0]
  def change
    add_index :content_items, :document_type
  end
end
|
// Enumerates all permutations of `arr` into `res` via depth-first search,
// copying the partial permutation at each level. The stray per-call debug
// console.log from the original has been removed (it fired on every
// recursive step); the result dump remains commented out as before.
(function () {
    const arr = [1, 2, 3];
    const res = [];

    function arrange(arr, temp) {
        for (let i = 0, length = arr.length; i < length; i++) {
            // Copy so sibling branches do not share state.
            const branch = temp.slice(0);
            // Final position: complete the permutation and record it.
            if (branch.length === length - 1) {
                if (!branch.includes(arr[i])) {
                    branch.push(arr[i]);
                    res.push(branch);
                }
                continue;
            }
            // Otherwise extend the branch and recurse.
            if (!branch.includes(arr[i])) {
                branch.push(arr[i]);
                arrange(arr, branch);
            }
        }
    }

    arrange(arr, []);
    // console.log(res)
})();
|
<gh_stars>1-10
const fs = require("fs");
const logger = require("./logger.helper");

module.exports = {
  // Persists the in-memory socket token cache to tmpTokens.js as a
  // requireable module, then terminates the process.
  // NOTE(review): tokens are interpolated into source code unescaped — a
  // token containing a quote would corrupt the generated file; confirm
  // tokens are always quote-free.
  saveTokens: globalStorage => {
    try {
      const fd = fs.openSync(`${__dirname}/../tmpTokens.js`, "w");
      fs.writeSync(fd, "module.exports = {");
      Object.keys(globalStorage.socketCache).map(token => fs.writeSync(fd, `"${token}":{refresh_token:"${globalStorage.socketCache[token].refresh_token}",userId:${globalStorage.socketCache[token].userId}},`));
      fs.writeSync(fd, "}");
      fs.closeSync(fd);
    } catch (error) {
      logger.add_log({
        type: "Error",
        description: "An error occured during writing tokens to file",
        additionnal_infos: {Error: error}
      });
    }
    // Exits unconditionally, even after a write failure — presumably this
    // is a shutdown handler; TODO confirm that is intended.
    process.exit(0);
  }
};
|
<reponame>stungkit/wertik-js
// Entry point: load the compiled Wertik app factory and start it.
const Wertik = require("./lib/next/index").default;
Wertik();
|
//
// Ryu
//
// Copyright (C) 2017 <NAME>
// All Rights Reserved.
//
// See the LICENSE file for details about the license covering
// this source code file.
//
#include <common/bytes.h>
#include "ram.h"
RTTR_REGISTRATION {
rttr::registration::class_<ryu::hardware::ram>("ryu::hardware::ram") (
rttr::metadata(ryu::hardware::meta_data_key::type_id, ryu::hardware::ram_id),
rttr::metadata(ryu::hardware::meta_data_key::type_name, "RAM IC"),
rttr::metadata(ryu::hardware::meta_data_key::type_category, ryu::hardware::category_memory)
)
.constructor<>(rttr::registration::public_access) (
rttr::policy::ctor::as_raw_ptr
);
}
namespace ryu::hardware {

    ram::ram() : integrated_circuit("ram-ic") {
    }

    ram::~ram() {
        // _buffer is allocated with new[] (see reallocate()), so it must be
        // released with delete[]; plain delete on an array new is undefined
        // behavior. (delete[] on nullptr is a safe no-op.)
        delete[] _buffer;
        _buffer = nullptr;
    }

    void ram::init() {
    }

    // Zero the entire backing store.
    void ram::zero() {
        std::memset(_buffer, 0, address_space());
    }

    void ram::on_initialize() {
    }

    // Writes a 16-bit value at `address`, byte-swapping when the host is
    // little endian but the IC is declared big endian. Returns the bytes
    // actually stored, in storage order.
    ryu::core::byte_list ram::write_word(
            uint32_t address,
            uint16_t value,
            integrated_circuit::endianness::types endianess) {
        ryu::core::byte_list data {};

        if (is_platform_little_endian()
        &&  endianess == integrated_circuit::endianness::types::big) {
            value = endian_swap_word(value);
        }

        auto byte_ptr = reinterpret_cast<uint8_t*>(&value);
        _buffer[address] = *byte_ptr;
        _buffer[address + 1] = *(byte_ptr + 1);

        data.push_back(*byte_ptr);
        data.push_back(*(byte_ptr + 1));

        return data;
    }

    // 32-bit counterpart of write_word; same endianness handling.
    ryu::core::byte_list ram::write_dword(
            uint32_t address,
            uint32_t value,
            integrated_circuit::endianness::types endianess) {
        ryu::core::byte_list data {};

        if (is_platform_little_endian()
        &&  endianess == integrated_circuit::endianness::types::big) {
            value = endian_swap_dword(value);
        }

        auto byte_ptr = reinterpret_cast<uint8_t*>(&value);
        _buffer[address] = *byte_ptr;
        _buffer[address + 1] = *(byte_ptr + 1);
        _buffer[address + 2] = *(byte_ptr + 2);
        _buffer[address + 3] = *(byte_ptr + 3);

        data.push_back(*byte_ptr);
        data.push_back(*(byte_ptr + 1));
        data.push_back(*(byte_ptr + 2));
        data.push_back(*(byte_ptr + 3));

        return data;
    }

    // Reads a 16-bit value, converting from storage order to the requested
    // endianness when they differ.
    uint16_t ram::read_word(
            uint32_t address,
            integrated_circuit::endianness::types endianess) const {
        auto value = *(reinterpret_cast<uint16_t*>(_buffer + address));
        if (is_platform_little_endian()
        &&  endianess == integrated_circuit::endianness::types::big) {
            return endian_swap_word(value);
        }
        return value;
    }

    // 32-bit counterpart of read_word.
    uint32_t ram::read_dword(
            uint32_t address,
            integrated_circuit::endianness::types endianess) const {
        auto value = *(reinterpret_cast<uint32_t*>(_buffer + address));
        if (is_platform_little_endian()
        &&  endianess == integrated_circuit::endianness::types::big) {
            return endian_swap_dword(value);
        }
        return value;
    }

    // (Re)creates the backing store to match the current address space and
    // rebuilds the memory map entry.
    void ram::reallocate() {
        clear_memory_map();
        add_memory_map_entry(0, address_space(), "RAM", "Read/write memory block");

        // Fix: array allocation must be paired with delete[] (was `delete`).
        delete[] _buffer;
        _buffer = new uint8_t[address_space()];

        // 0xa9 fill — presumably a poison pattern so reads of uninitialized
        // RAM are recognizable; TODO confirm intent.
        fill(0xa9);
    }

    // Fills the entire backing store with `value`.
    void ram::fill(uint8_t value) {
        std::memset(_buffer, value, address_space());
    }

    void ram::on_address_space_changed() {
        reallocate();
    }

    access_type_flags ram::access_type() const {
        return access_types::writable | access_types::readable;
    }

    uint8_t ram::read_byte(uint32_t address) const {
        return _buffer[address];
    }

    void ram::write_byte(uint32_t address, uint8_t value) {
        _buffer[address] = value;
    }

}
|
package pl.put.poznan.bootstrap_builder.logic;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
public class DirectorTest {

    /**
     * Verifies that Director drives the builder through the full HTML
     * construction sequence in the correct order and returns the builder's
     * product. The builder itself is mocked; only the interaction order and
     * the returned skeleton are asserted.
     */
    @Test
    public void constructHTML() {
        // Canned product the mocked builder will hand back.
        HTML htmlFooter = new HTML();
        htmlFooter.setDescription("footer");
        htmlFooter.setHtmlSkeleton("<html>\n" +
                "<head>\n" +
                "<meta name=\"description\" content=\"\" />\n" +
                "<meta name=\"url\" content=\"\">\n" +
                "<meta name=\"robots\" content=\"\" />\n" +
                "<title></title>\n" +
                "</head>\n\n" +
                "<body>\n" +
                "<main class=\"container\"></main>\n" +
                "<body>\n" +
                "</html>");

        // Testing htmlFooterBuilder using mocks
        HTMLBuilder htmlFooterBuilder = mock(HTMLFooterBuilder.class);
        when(htmlFooterBuilder.getHTML()).thenReturn(htmlFooter);

        Director director = new Director(htmlFooterBuilder);
        HTML result = director.constructHTML();

        // The director must call the builder steps in document order.
        InOrder inOrder = inOrder(htmlFooterBuilder);
        inOrder.verify(htmlFooterBuilder).addHTMLTag();
        inOrder.verify(htmlFooterBuilder).addHeadTag();
        inOrder.verify(htmlFooterBuilder).addMetaTag();
        inOrder.verify(htmlFooterBuilder).addTitleTag();
        inOrder.verify(htmlFooterBuilder).addHeadClosingTag();
        inOrder.verify(htmlFooterBuilder).addBodyTag();
        inOrder.verify(htmlFooterBuilder).addMainTag();
        inOrder.verify(htmlFooterBuilder).addBodyClosingTag();
        inOrder.verify(htmlFooterBuilder).addHTMLClosingTag();
        inOrder.verify(htmlFooterBuilder).getHTML();

        verify(htmlFooterBuilder).getHTML();
        assertEquals(result.getHtmlSkeleton(), htmlFooterBuilder.getHTML().getHtmlSkeleton());
    }
}
|
<reponame>DrYaoHongBin/shop
package com.shop.dao.merchant;
import com.shop.model.merchant.Merchant;
import com.shop.util.BaseMapper;
/** MyBatis data-access mapper for Merchant entities; all CRUD operations are inherited from BaseMapper. */
public interface MerchantMapper extends BaseMapper<Merchant> {
}
|
//
// Created by ooooo on 2020/3/11.
//
#ifndef CPP_014_1__SOLUTION2_H_
#define CPP_014_1__SOLUTION2_H_
#include <iostream>
#include <unordered_map>
#include <vector>
using namespace std;
/**
* dp[n] = max( dp[1] * dp[n-1], dp[2] * dp[n-2] )
*
* dp[n] 表示绳子长度为 n
*/
class Solution {
 public:
  // Bottom-up DP over rope lengths. best[len] is the maximum product
  // obtainable from a piece of length `len`, where only the full rope is
  // forced to be cut at least once; shorter pieces may be left whole
  // (hence the `len` seed below).
  int cuttingRope(int n) {
    std::vector<int> best(n + 1, 0);
    best[0] = 1;
    for (int len = 1; len <= n; ++len) {
      // Whole-piece value: `len` itself, except the full rope, which must
      // be split and therefore starts from 1.
      best[len] = (len == n) ? 1 : len;
      // Try every first-cut position up to the midpoint (symmetry).
      for (int cut = 1; cut * 2 <= len; ++cut)
        best[len] = std::max(best[len], best[cut] * best[len - cut]);
    }
    return best[n];
  }
};
#endif //CPP_014_1__SOLUTION2_H_
|
package com.trackorjargh.javacontrollers;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import javax.servlet.http.HttpServletResponse;
import org.springframework.stereotype.Controller;
import org.springframework.util.FileCopyUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
public class ImagesControler {

    /**
     * Streams an image from the local "files" folder as image/jpeg.
     * The requested name is normalized and checked to stay inside the
     * files folder, so crafted names (e.g. containing "..") cannot read
     * arbitrary files on disk; such requests, like missing files, get 404.
     *
     * @param fileName image file name taken from the URL (untrusted input)
     * @param res      response the image bytes are written to
     */
    @RequestMapping("/imagen/{fileName:.+}")
    public void handleFileDownload(@PathVariable String fileName, HttpServletResponse res)
            throws FileNotFoundException, IOException {
        Path FILES_FOLDER = Paths.get(System.getProperty("user.dir"), "files");
        // Normalize to collapse any ".." segments before the containment check.
        Path image = FILES_FOLDER.resolve(fileName).normalize();

        if (!image.startsWith(FILES_FOLDER)) {
            // Path traversal attempt — treat exactly like a missing file.
            res.sendError(404);
            return;
        }

        if (Files.exists(image)) {
            res.setContentType("image/jpeg");
            res.setContentLength((int) image.toFile().length());
            FileCopyUtils.copy(Files.newInputStream(image), res.getOutputStream());
        } else {
            res.sendError(404);
        }
    }
}
|
#!/bin/bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

# This script drives the experimental Ibex synthesis flow. More details can be
# found in README.md

set -e
set -o pipefail

# Print a message to stderr and abort the flow.
error () {
    echo >&2 "$@"
    exit 1
}

# Mirror stdout into a named log file under the flow's log directory.
teelog () {
    tee "$LR_SYNTH_OUT_DIR/log/$1.log"
}

if [ ! -f syn_setup.sh ]; then
    error "No syn_setup.sh file: see README.md for instructions"
fi

#-------------------------------------------------------------------------
# setup flow variables
#-------------------------------------------------------------------------
source syn_setup.sh

#-------------------------------------------------------------------------
# use sv2v to convert all SystemVerilog files to Verilog
#-------------------------------------------------------------------------

mkdir -p "$LR_SYNTH_OUT_DIR/generated"
mkdir -p "$LR_SYNTH_OUT_DIR/log"
mkdir -p "$LR_SYNTH_OUT_DIR/reports/timing"

for file in ../rtl/*.sv; do
    module=`basename -s .sv $file`

    # Skip packages
    if echo "$module" | grep -q '_pkg$'; then
        continue
    fi

    sv2v \
        --define=SYNTHESIS \
        ../rtl/*_pkg.sv \
        ../vendor/lowrisc_ip/ip/prim/rtl/prim_ram_1p_pkg.sv \
        -I../vendor/lowrisc_ip/ip/prim/rtl \
        -I../vendor/lowrisc_ip/dv/sv/dv_utils \
        $file \
        > $LR_SYNTH_OUT_DIR/generated/${module}.v
done

# remove tracer (not needed for synthesis)
rm -f $LR_SYNTH_OUT_DIR/generated/ibex_tracer.v

# remove the FPGA & register-based register file (because we will use the
# latch-based one instead)
rm -f $LR_SYNTH_OUT_DIR/generated/ibex_register_file_ff.v
rm -f $LR_SYNTH_OUT_DIR/generated/ibex_register_file_fpga.v

# Synthesis (yosys), then static timing analysis (OpenSTA); each step logs
# to $LR_SYNTH_OUT_DIR/log and aborts the flow on failure.
yosys -c ./tcl/yosys_run_synth.tcl |& teelog syn || {
    error "Failed to synthesize RTL with Yosys"
}

sta ./tcl/sta_run_reports.tcl |& teelog sta || {
    error "Failed to run static timing analysis"
}

./translate_timing_rpts.sh

python/get_kge.py $LR_SYNTH_CELL_LIBRARY_PATH $LR_SYNTH_OUT_DIR/reports/area.rpt
|
"""Utils for common operations in GUI tests
"""
__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2017 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
|
<gh_stars>1-10
// Parses a Spacebars template string into an HTMLjs tree, recognizing
// complete template tags ({{...}}) via TemplateTag.parseCompleteTag.
SpacebarsCompiler.parse = function (input) {
  var tree = HTMLTools.parseFragment(
    input,
    { getTemplateTag: TemplateTag.parseCompleteTag });

  return tree;
};
// Convenience wrapper: parse the template source, then generate render code.
SpacebarsCompiler.compile = function (input, options) {
  var tree = SpacebarsCompiler.parse(input);
  return SpacebarsCompiler.codeGen(tree, options);
};
// Visitor that walks the parsed tree and replaces every TemplateTag node
// with generated code (via the supplied CodeGen instance).
SpacebarsCompiler._TemplateTagReplacer = HTML.TransformingVisitor.extend();
SpacebarsCompiler._TemplateTagReplacer.def({
  visitObject: function (x) {
    if (x instanceof HTMLTools.TemplateTag) {

      // Make sure all TemplateTags in attributes have the right
      // `.position` set on them. This is a bit of a hack
      // (we shouldn't be mutating that here), but it allows
      // cleaner codegen of "synthetic" attributes like TEXTAREA's
      // "value", where the template tags were originally not
      // in an attribute.
      if (this.inAttributeValue)
        x.position = HTMLTools.TEMPLATE_TAG_POSITION.IN_ATTRIBUTE;

      return this.codegen.codeGenTemplateTag(x);
    }

    return HTML.TransformingVisitor.prototype.visitObject.call(this, x);
  },
  visitAttributes: function (attrs) {
    // An entire attributes dict may itself be a template tag ({{attrs}}).
    if (attrs instanceof HTMLTools.TemplateTag)
      return this.codegen.codeGenTemplateTag(attrs);

    // call super (e.g. for case where `attrs` is an array)
    return HTML.TransformingVisitor.prototype.visitAttributes.call(this, attrs);
  },
  visitAttribute: function (name, value, tag) {
    // Track attribute context so visitObject can set TemplateTag.position.
    this.inAttributeValue = true;
    var result = this.visit(value);
    this.inAttributeValue = false;

    if (result !== value) {
      // some template tags must have been replaced, because otherwise
      // we try to keep things `===` when transforming. Wrap the code
      // in a function as per the rules. You can't have
      // `{id: Blaze.View(...)}` as an attributes dict because the View
      // would be rendered more than once; you need to wrap it in a function
      // so that it's a different View each time.
      return BlazeTools.EmitCode(this.codegen.codeGenBlock(result));
    }
    return result;
  }
});
// Generate JavaScript render-function source from a parsed template tree.
// Options: `isTemplate` / `isBody` (top-level template vs <body>), and
// `sourceName` (used in error messages). Returns beautified code for an
// anonymous function expression.
SpacebarsCompiler.codeGen = function (parseTree, options) {
  // is this a template, rather than a block passed to
  // a block helper, say
  var isTemplate = (options && options.isTemplate);
  var isBody = (options && options.isBody);
  var sourceName = (options && options.sourceName);
  var tree = parseTree;
  // The flags `isTemplate` and `isBody` are kind of a hack.
  if (isTemplate || isBody) {
    // optimizing fragments would require being smarter about whether we are
    // in a TEXTAREA, say.
    tree = SpacebarsCompiler.optimize(tree);
  }
  // throws an error if using `{{> React}}` with siblings
  new ReactComponentSiblingForbidder({sourceName: sourceName})
    .visit(tree);
  // Replace template tags with generated code before serializing the tree.
  var codegen = new SpacebarsCompiler.CodeGen;
  tree = (new SpacebarsCompiler._TemplateTagReplacer(
    {codegen: codegen})).visit(tree);
  // Wrap the serialized tree in a function; top-level templates bind
  // `view` to `this` so helpers generated above can reference it.
  var code = '(function () { ';
  if (isTemplate || isBody) {
    code += 'var view = this; ';
  }
  code += 'return ';
  code += BlazeTools.toJS(tree);
  code += '; })';
  code = SpacebarsCompiler._beautify(code);
  return code;
};
/**
 * Pretty-print generated code using UglifyJS (from the `minifiers`
 * package) when available; otherwise return the code unchanged.
 */
SpacebarsCompiler._beautify = function (code) {
  if (!(Package.minifiers && Package.minifiers.UglifyJSMinify)) {
    // don't actually beautify; no UglifyJS
    return code;
  }
  var result = Package.minifiers.UglifyJSMinify(code, {
    fromString: true,
    mangle: false,
    compress: false,
    output: {
      beautify: true,
      indent_level: 2,
      width: 80
    }
  });
  // Uglify interprets our expression as a statement and may add a
  // semicolon. Strip the trailing semicolon so it stays an expression.
  return result.code.replace(/;$/, '');
};
|
#!/bin/bash
# Download, build, and install the ATS2 (Postiats) compiler plus its
# contrib library into $HOME/atshome, then print the environment setup
# the user should add to their shell rc file.
if [ ! -d "$HOME"/atshome ]; then mkdir -p "$HOME"/atshome; fi
pushd "$HOME"/atshome
# Fetch the compiler and contrib tarballs from SourceForge (jaist mirror);
# --trust-server-name saves them under their real filenames.
wget --trust-server-name 'http://downloads.sourceforge.net/project/ats2-lang/ats2-lang/ats2-postiats-0.3.6/ATS2-Postiats-0.3.6.tgz?r=http%3A%2F%2Fsourceforge.net%2Fprojects%2Fats2-lang%2Ffiles%2Fats2-lang%2Fats2-postiats-0.3.6%2F&ts=1437234613&use_mirror=jaist'
wget --trust-server-name 'http://downloads.sourceforge.net/project/ats2-lang-contrib/ats2-lang-contrib/ATS2-Postiats-contrib-0.3.6.tgz?r=http%3A%2F%2Fsourceforge.net%2Fprojects%2Fats2-lang-contrib%2Ffiles%2Fats2-lang-contrib%2F&ts=1437234808&use_mirror=jaist'
# Unpack both trees and export the variables the ATS toolchain expects.
tar xf ATS2-Postiats-0.3.6.tgz
PATSHOME="$(pwd)"/ATS2-Postiats-0.3.6
export PATSHOME
export PATH=${PATSHOME}/bin:${PATH}
rm ATS2-Postiats-0.3.6.tgz
tar xf ATS2-Postiats-contrib-0.3.6.tgz
PATSHOMERELOC="$(pwd)"/ATS2-Postiats-contrib-0.3.6
export PATSHOMERELOC
rm ATS2-Postiats-contrib-0.3.6.tgz
# Build the compiler in-place.
# NOTE(review): neither pushd is matched by a popd — the script ends inside
# the build directory; confirm that is intended.
pushd "${PATSHOME}"
./configure
make
# Print the permanent setup and a smoke-test recipe for the user.
echo 'Add to .bashrc or .zshrc'
echo ' export PATSHOME="$HOME"/atshome/ATS2-Postiats-0.3.6'
echo ' export PATH=${PATSHOME}/bin:${PATH}'
echo ' export PATSHOMERELOC="$HOME"/atshome/ATS2-Postiats-contrib-0.3.6'
echo 'example:'
echo ' touch hello.dats'
echo ' echo '\''implement main0 () = println! ("Hello world!")'\'' > hello.dats'
echo ' patscc -o hello hello.dats'
echo ' ./hello'
|
#!/usr/bin/env python
#
# Copyright (c) 2016 by MemSQL. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup, find_packages
def readme():
    """Return the contents of README.md (used as the long description)."""
    with open('README.md') as readme_file:
        return readme_file.read()
# Package metadata and entry point for the memsql-top console tool.
setup(
    name='memsql-top',
    version='0.0.11',
    # Fixed typo in the user-facing description: "visualing" -> "visualizing".
    description='A tool for visualizing top queries run against memsql',
    long_description=readme(),
    author='<NAME>',
    author_email='awreece' '@' '<EMAIL>',
    license='Apache License',
    install_requires=[
        'urwid==1.3.1',
        'attrdict==2.0.0',
        'pymysql==0.7.11',
    ],
    packages=find_packages(),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console :: Curses',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Topic :: Database',
        'Topic :: Software Development :: Testing',
    ],
    entry_points={
        'console_scripts': [
            'memsql-top=memsql_top.main:main',
        ],
    }
)
|
// Minimal event bus for parent-child component communication.
const eventBus = {
  // Shared Vue instance acting as the bus; created lazily by Instance().
  store: null,
  // Create the bus instance on first call; later calls are no-ops.
  Instance(Vue) {
    if (this.store === null) {
      this.store = new Vue;
    }
  },
};
export default eventBus;
|
package db
import (
"context"
"time"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
)
// Database and collection names used throughout this package.
const (
	database = "btcPrice"
	// CollectionPrice a collection name for CRU pic
	CollectionPrice = "prices"
	// CollectionUser a collection name for CR user
	CollectionUser = "users"
	// CollectionLatistedID a collection name for foreign key(?) for prices
	CollectionLatistedID = "latestID"
)
// mongoClient is the package-wide client handle, set once via SetMongoClint.
var mongoClient *mongo.Client
// SetMongoClint stores the package-level mongo client, keeping the first
// client ever supplied (subsequent calls are ignored).
// NOTE(review): the exported name contains a typo ("Clint"); it is kept
// as-is here since renaming would break external callers.
func SetMongoClint(c *mongo.Client) {
	// can be more explicitly
	if mongoClient == nil {
		mongoClient = c // <--- NOT THREAD SAFE: check-then-set race if called concurrently
	}
}
// GetCollection returns a handle to the named collection in the package
// database. SetMongoClint must have been called first, otherwise this
// panics with a nil-pointer dereference on mongoClient.
func GetCollection(collectionName string) *mongo.Collection {
	return mongoClient.Database(database).Collection(collectionName)
}
// Create inserts a single document into the named collection, bounding
// the operation with a 10-second timeout.
func Create(collectionName string, item interface{}) (*mongo.InsertOneResult, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	coll := mongoClient.Database(database).Collection(collectionName)
	return coll.InsertOne(ctx, item)
}
// Delete removes the document whose _id matches the given ObjectID from
// the named collection, bounding the operation with a 10-second timeout.
func Delete(collectionName string, _id primitive.ObjectID) (*mongo.DeleteResult, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	byID := bson.M{"_id": bson.M{"$eq": _id}}
	coll := mongoClient.Database(database).Collection(collectionName)
	return coll.DeleteOne(ctx, byID)
}
// Update sets the fields of the document whose _id matches the given
// ObjectID to the values in item (via $set), bounding the operation with
// a 10-second timeout.
// Fixed: the filter matches at most one document (by _id), and the
// function is documented as updating one object, so UpdateOne is used
// instead of the previous UpdateMany.
func Update(collectionName string, _id primitive.ObjectID, item interface{}) (*mongo.UpdateResult, error) {
	filter := bson.M{"_id": bson.M{"$eq": _id}}
	update := bson.M{"$set": item}
	collection := mongoClient.Database(database).Collection(collectionName)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	return collection.UpdateOne(ctx, filter, update)
}
// FindOne returns the single-result handle for the first document in the
// named collection matching filter, using a 10-second timeout.
// NOTE(review): ctx is cancelled when this function returns, while the
// returned SingleResult has not yet been decoded by the caller — verify
// the driver has fully executed the query by then.
func FindOne(collectionName string, filter interface{}) (r *mongo.SingleResult) {
	collection := mongoClient.Database(database).Collection(collectionName)
	ctx, cancle := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancle()
	return collection.FindOne(ctx, filter)
}
|
<filename>src/com/samoleary/Mobile_Asn_Two/GameFinish.java
package com.samoleary.Mobile_Asn_Two;
import android.app.Activity;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Typeface;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.ArrayList;
/**
* Author: <NAME>
* Email: <EMAIL>
* Created: 15/11/13
* Revision: 3
* Revision History:
* 1: 15/11/13
* 2: 27/11/13
* 3: 29/11/13
*
* Description:
* This Activity is called once the user completes the bonus game and finishes their overall game.
* If the user manages to answer over 40% of the questions correctly they will have passed the game and a congratulations
* message is displayed, along with the number of questions they got right and the experience points they have earned.
* If they get less than 40% then a condolences message is displayed, along with the number of questions they got right
* but no experience points will have been earned.
* The user can navigate away from this Activity via the Action Bar or the Main Menu button.
* The results of their game are stored in the database once this activity starts.
* If the user gets a score that is higher than any score they've previously gotten then they will receive a notification
* congratulating them.
*/
public class GameFinish extends Activity {
    private NotificationManager mNManager;
    private static final int NOTIFY_ID = 1100;
    private GameDB dba; // The Database object used to read/write to the SQLite Database.
    private ArrayList<GameData> gameDatas; // This ArrayList will hold GameData objects. These objects contain data about the user
    // that are stored in a Database.
    private int size; // This variable holds the size of the ArrayList.
    private GameData latestGame; // GameData object that holds the information from the previous game.
    private int resultPercent; // The percentage, or grade, the user achieved in their game.
    private TextView heading;
    private TextView msg;
    private TextView qsRight;
    private TextView xpEarned;
    private int score;
    private int totalqs;
    private Typeface chalkTypeFace; // The Typeface object changes the font of the text in view to which it is applied to.
    private GameHelper helper; // This object contains 2 methods that calculate the largest number in a series of numbers
    // and the average of a series of numbers.
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.gamefinish);
        init();
    }
    /**
     * onCreateOptionsMenu allows menu items to be placed in the Action Bar that runs along the top of the screen. The
     * menu items and their properties, such as the id and icon, are described in the XML file 'main_action_menu'.
     * The MenuInflater takes this list and literally 'inflates' it into a menu to be displayed.
     *
     * @param menu
     *      The Menu object representing the Menu to be 'inflated', or populated.
     *
     * @return
     *      Returns either True or False if the Menu has been created successfully or not.
     */
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.main_action_menu, menu);
        return super.onCreateOptionsMenu(menu);
    }
    /**
     * This initialize method sets up the Views to be displayed to the user, as well as configuring a font for the Typeface
     * object to use and applying this Typeface to all the Views with text in them.
     * The Notification Service and Notifcation Manager are initialized here.
     * The GameHelper object is also initialized in here, as well the Database object. An ArrayList configured to contain
     * objects of type GameData is initialized and will be populated with data from the Database.
     */
    private void init() {
        helper = new GameHelper();
        // Initializing the Notification Manager
        String ns = Context.NOTIFICATION_SERVICE;
        mNManager = (NotificationManager) getSystemService(ns);
        // NOTE(review): the Notification(int, CharSequence, long) constructor and
        // setLatestEventInfo (used below) are legacy APIs — confirm the project's
        // target SDK still supports them.
        final Notification msgNotif = new Notification(R.drawable.apple,
                String.format(getResources().getString(R.string.gamefinish_notifmsg)),
                System.currentTimeMillis()); // Sets the message displayed at the top of the screen when the notification comes in.
        // Initializing the Database object and opening the connection to the Database.
        dba = new GameDB(this);
        dba.open();
        gameDatas = new ArrayList<GameData>();
        // Retrieving information from the previous Activity.
        Intent infoFromGameBonus = getIntent();
        score = infoFromGameBonus.getIntExtra("score", 0);
        totalqs = infoFromGameBonus.getIntExtra("totalqs", 0);
        chalkTypeFace = Typeface.createFromAsset(getAssets(), "fonts/kg.ttf"); // The Typeface object changes the font of the text in view to which it is applied to.
        heading = (TextView) findViewById(R.id.gamefinish_heading);
        msg = (TextView) findViewById(R.id.gamefinish_msg);
        qsRight = (TextView) findViewById(R.id.gamefinish_qsright);
        xpEarned = (TextView) findViewById(R.id.gamefinish_xpearned);
        Button mainMenu = (Button) findViewById(R.id.gamefinish_btn);
        mainMenu.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                try {
                    gotoMainMenu();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        heading.setTypeface(chalkTypeFace);
        msg.setTypeface(chalkTypeFace);
        qsRight.setTypeface(chalkTypeFace);
        xpEarned.setTypeface(chalkTypeFace);
        mainMenu.setTypeface(chalkTypeFace);
        // Sets the Views to display the data from the current game.
        setViews();
        // Determines if the user has set a highscore and, if so, sets off a notification.
        getData(msgNotif);
    }
    /**
     * Saves the data from the current game to the Database.
     * If the size of the ArrayList that contains data previously read out from the Database is zero, i.e the Database
     * is empty then this method determines if the user gained enough experience points to gain their first level then
     * enters the info into the Database.
     * If the ArrayList does contain previous data then steps are taken to update some of the data, such as total
     * experience points, games played and current level before being saved to the Database.
     */
    private void saveToDB() {
        size = gameDatas.size();
        if(size == 0) {
            if(resultPercent == 100) {                      // The user has gained 100 experience points in their first
                dba.insertInfo(1, score, resultPercent, 1); // game and has gained a level.
                dba.close();
            } else {
                dba.insertInfo(0, score, resultPercent, 1);
                dba.close();
            }
        } else {
            latestGame = gameDatas.get(size - 1); // Get the latest GameData object.
            int newXP = latestGame.getxP() + resultPercent;
            int newLevel = calculateLevel();
            // NOTE(review): games played is read via getLevel() here, while
            // calculateLevel() reads the current level via getGamesPlayed() —
            // these accessors look swapped; confirm against GameData's definition.
            int gamesPlayed = latestGame.getLevel();
            gamesPlayed++;
            dba.insertInfo(newLevel, score, newXP, gamesPlayed);
            dba.close();
        }
    }
    /**
     * This method, calculateLevel(), determines if a user has gained a level based on their current experience points
     * and experience points they've just earned. Every 100 points a user gains a level.
     *
     * @return
     *      The user's (possibly incremented) level after applying the experience earned this game.
     */
    private int calculateLevel() {
        size = gameDatas.size();
        int newLevel;
        if(!(size == 0)) {                        // If the ArrayList isn't empty then a game has previously taken place
            latestGame = gameDatas.get(size - 1); // and we think the user might have some experience or a level.
            // NOTE(review): the current level is read via getGamesPlayed() — see the
            // matching note in saveToDB(); confirm these accessors are not swapped.
            int currentLvl = latestGame.getGamesPlayed();
            int currentXP = latestGame.getxP();
            if(currentLvl == 0) {                          // If the user hasn't gained a level but has played previously
                if (resultPercent + currentXP >= 100) {    // Add their current experience to the experience they've just earned
                    newLevel = currentLvl + 1;             // If the result is over 100 they level up.
                    return newLevel;
                } else {                                   // If the results is below 100 they don't level up yet.
                    newLevel = 0;
                    return newLevel;
                }
            }
            else {                                              // If the user is level 1 or above
                int excessXP = currentXP - (currentLvl * 100);  // Find out how much experience they have in their current level
                if(resultPercent + excessXP >= 100) {           // Add this experience to the experience they've just earned
                    newLevel = currentLvl + 1;                  // If the result is over 100 they level up.
                    return newLevel;
                } else {                                        // If the results is below 100 they don't level up yet.
                    return currentLvl;
                }
            }
        } else {                       // This is the users first game, they have no experience or levels.
            if(resultPercent == 100) { // If they get 100 in their first game they gain their first level.
                newLevel = 1;
                return newLevel;
            } else {
                newLevel = 0;          // If not, then they stay at level 0.
                return newLevel;
            }
        }
    }
    /**
     * This method, getData(), attempts to retrieve data from the Database to populate the gameDatas ArrayList.
     * If data is retrieved from the database then a quick comparison is made between the highest score achieved so far
     * and the score just achieved. If the score that has just been achieved is greater than the highest score to date then
     * the user receives a notification.
     *
     * @param msgNotif
     *      This is an object of type Notification that is passed to the giveNotificaiton() method if a highscore has been
     *      achieved.
     */
    private void getData(Notification msgNotif) {
        // NOTE(review): startManagingCursor() is a legacy Activity API and the
        // cursor is never explicitly closed here — verify lifecycle handling.
        Cursor c = dba.getInfo();
        startManagingCursor(c);
        if(c.moveToFirst()){
            do{
                int gp = c.getInt(c.getColumnIndex(Constants.GAMES_PLAYED));
                int hs = c.getInt(c.getColumnIndex(Constants.HIGH_SCORE));
                int xp = c.getInt(c.getColumnIndex(Constants.XP));
                int lvl = c.getInt(c.getColumnIndex(Constants.LEVEL));
                GameData temp = new GameData(gp,hs,xp,lvl);
                gameDatas.add(temp);
            } while(c.moveToNext());
        }
        if(!gameDatas.isEmpty()) {
            size = gameDatas.size();
            int[] scoreArray = new int[size];
            int max;
            // Collect every stored high score and find the biggest one.
            for (int i = 0; i < size; i++) {
                scoreArray[i] = gameDatas.get(i).getHighScore();
            }
            max = helper.getMax(scoreArray);
            if(score > max){
                giveNotification(msgNotif);
            }
        }
        saveToDB();
    }
    /**
     * This method, giveNotification(), notifies the user if they have achieved a new high score. When the user checks the
     * Notification Manager and taps on the notification they are directed to a Pending Activity that displays a couple
     * of Text Views congratulating the user.
     *
     * @param msgNotif
     *      This is the notification object. Once passed in to this method a title, some text, a sound and a pending
     *      activity are assigned to this object.
     */
    private void giveNotification(Notification msgNotif) {
        Context context = getApplicationContext();
        CharSequence contentTitle = String.format(getResources().getString(R.string.gamefinish_notiftitle));
        CharSequence contentText = String.format(getResources().getString(R.string.gamefinish_notiftext));
        Intent msgIntent = new Intent(this, NewHighscore.class);
        PendingIntent intent = PendingIntent.getActivity(GameFinish.this, 0, msgIntent, Intent.FLAG_ACTIVITY_NEW_TASK);
        msgNotif.defaults |= Notification.DEFAULT_SOUND;
        msgNotif.flags |= Notification.FLAG_AUTO_CANCEL;
        // NOTE(review): setLatestEventInfo() is a legacy Notification API —
        // confirm the project's min/target SDK still supports it.
        msgNotif.setLatestEventInfo(context, contentTitle, contentText, intent);
        mNManager.notify(NOTIFY_ID, msgNotif);
    }
    /**
     * This method, setViews(), determines what message to display to the user by doing a quick calculation on the score
     * they've just received. If the score is less than 40 then they've failed and an appropriate message is displayed.
     * If they've score more than 40 then a different message is displayed.
     */
    private void setViews() {
        double result = (score / (double) totalqs) * 100;
        resultPercent = (int) Math.round(result);
        String resultText = String.format(getResources().getString(R.string.gamefinish_qsright), score, totalqs);
        qsRight.setText(resultText);
        if(resultPercent < 40) {
            heading.setText(String.format(getResources().getString(R.string.gamefinish_headingfail)));
            msg.setText(String.format(getResources().getString(R.string.gamefinish_msgfail)));
            int xp = 0;
            xpEarned.setText(String.format(getResources().getString(R.string.gamefinish_xpearned), xp));
        } else {
            heading.setText(String.format(getResources().getString(R.string.gamefinish_headingpass)));
            msg.setText(String.format(getResources().getString(R.string.gamefinish_msgpass)));
            xpEarned.setText(String.format(getResources().getString(R.string.gamefinish_xpearned), resultPercent));
        }
    }
    /**
     * onOptionsItemSelected is a method that identifies the Menu Item that is selected from the Menu(s) available and
     * and takes the appropriate action upon selection. In this case two Items are available to select from the Action
     * Bar.
     *
     * @param item
     *      The selected Item is passed into the method where its ID is identified and used in a Switch statement to
     *      determine the action to take.
     *
     * @return
     *      The method returns True if the Item ID is matched within in the Switch statement, otherwise the Menu Item is
     *      passed to the Superclass where it is dealt with and the value from that method is returned.
     */
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_profile:
                gotoUserProfile();
                return true;
            case R.id.action_settings:
                gotoOptions();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }
    /**
     * Creates an Intent to take the user to the Profile Activity.
     */
    private void gotoUserProfile() {
        Intent launchUserProfile = new Intent(this, Profile.class);
        startActivity(launchUserProfile);
    }
    /**
     * Creates an Intent to take the user to the Preferences Activity.
     */
    private void gotoOptions() {
        Intent launchOptions = new Intent(this, Preferences.class);
        startActivity(launchOptions);
    }
    /**
     * Creates an Intent to take the user to the Main Screen Activity.
     */
    private void gotoMainMenu() {
        Intent launchMainMenu = new Intent(this, MainScreen.class);
        startActivity(launchMainMenu);
    }
}
|
var path = require('path');
var fileSystem = require('fs');
var webpack = require('webpack');
var HtmlWebpackPlugin = require('html-webpack-plugin');
var AureliaWebpackPlugin = require('aurelia-webpack-plugin');
var pkg = require('./package.json');
// Version-stamped bundle filenames, e.g. "main-1.2.3.js".
var outputFileTemplateSuffix = '-' + pkg.version;
// NOTE(review): vendorPackages is computed but never referenced in this
// config (no vendor entry/commons chunk below) — confirm whether it is dead code.
var vendorPackages = Object.keys(pkg.dependencies).filter(function(el) {
  return el.indexOf('font') === -1; // exclude font packages from vendor bundle
});
// Production webpack configuration for an Aurelia app: single "main"
// entry, versioned output under dist/, and index.html generated from
// index.prod.html by HtmlWebpackPlugin.
module.exports = {
  entry: {
    main: [
      './src/main'
    ]
  },
  output: {
    path: path.join(__dirname, 'dist'),
    filename: '[name]' + outputFileTemplateSuffix + '.js',
    chunkFilename: '[id]' + outputFileTemplateSuffix + '.js'
  },
  plugins: [
    new AureliaWebpackPlugin(),
    new HtmlWebpackPlugin({
      title: 'Aurelia webpack skeleton - ' + pkg.version,
      template: 'index.prod.html',
      filename: 'index.html'
    })
  ],
  resolve: {
    root: [
      path.resolve('./'),
    ]
  },
  module: {
    // Loader chain: ES-next via Babel, CSS inlined via style-loader,
    // templates as raw strings, small images/fonts inlined as data URIs,
    // larger assets emitted as files.
    loaders: [
      { test: /\.js$/, loader: 'babel', exclude: /node_modules/, query: {stage: 0} },
      { test: /\.css?$/, loader: 'style!css' },
      { test: /\.html$/, loader: 'raw' },
      { test: /\.(png|gif|jpg)$/, loader: 'url-loader?limit=8192' },
      { test: /\.woff2(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "url-loader?limit=10000&minetype=application/font-woff2" },
      { test: /\.woff(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "url-loader?limit=10000&minetype=application/font-woff" },
      { test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "file-loader" }
    ]
  }
};
|
#!/bin/bash
#$ -cwd
#$ -l mem_free=2G,h_vmem=3G,h_fsize=100G
#######################################
# Shell script to run debris simulation
#######################################
# This script runs part of the debris simulation by:
# (i) converting BAM to SAM
# (ii) parsing through the merged SAM file to lyse some percentage of cell
# barcodes and assign these reads to other cell barcodes
# (iii) converting SAM back to BAM
# The modifed BAM file can then be used by cellSNP and Vireo (or alternative
# tools) in the following scripts.
# Notes:
# - lookup tables used in awk command are saved in .tsv files generated with
# the scripts "generate_awk_lookup_tables_X_debris.R"
# start runtime
start=`date +%s`
# Fixed: create the output directory BEFORE writing into it; previously
# `mkdir -p` only ran at the very end, after samtools had already tried to
# write the modified BAM into this directory.
mkdir -p ../../../supplementary_debris/scenarios/lung/nodoublets
# -----------------------------
# Parse through merged BAM file
# -----------------------------
# parse through merged BAM file to lyse some percentage of cell barcodes and
# assign these reads to other cell barcodes
# for one simulation scenario (dataset, percent doublets, percent debris)
# note hyphen for argument order
samtools view -h ../../../benchmarking/outputs/lung/bam_merged/bam_merged.bam | \
awk \
-v f_remaining="../../../supplementary_debris/scenarios/lung/nodoublets/debris_remaining_lung_nodoublets_debris40pc.tsv" \
-v n_remaining="$(wc -l ../../../supplementary_debris/scenarios/lung/nodoublets/debris_remaining_lung_nodoublets_debris40pc.tsv | cut -f1 -d' ')" \
'NR==1 { next }
FNR==NR { lysed[$2]=$2; next }
FILENAME==f_remaining { remaining[$1]=$2; next }
(i=gensub(/.*CB\:Z\:([A-Za-z]+\-[A-Za-z0-9]+).*/, "\\1", 1, $0)) in lysed { gsub(i, remaining[int(rand()*n_remaining+1)]) }1' \
../../../supplementary_debris/scenarios/lung/nodoublets/debris_lysed_lung_nodoublets_debris40pc.tsv ../../../supplementary_debris/scenarios/lung/nodoublets/debris_remaining_lung_nodoublets_debris40pc.tsv - | \
samtools view -bo ../../../supplementary_debris/scenarios/lung/nodoublets/bam_merged_lung_nodoublets_debris40pc.bam
# ---------
# Index BAM
# ---------
samtools index ../../../supplementary_debris/scenarios/lung/nodoublets/bam_merged_lung_nodoublets_debris40pc.bam
# end runtime
end=`date +%s`
runtime=`expr $end - $start`
# save runtime
echo runtime: $runtime seconds > ../../../supplementary_debris/scenarios/lung/nodoublets/runtime_parse_BAM_lung_nodoublets_debris40pc.txt
|
<reponame>AJS-development/UnBottable-MultiOgar
// Import
var BinaryWriter = require("./BinaryWriter");
var Logger = require('../modules/Logger');
/**
 * Per-client "update nodes" packet: records which cells were added,
 * changed, eaten, or deleted this tick, for serialization by build().
 */
function UpdateNodes(playerTracker, addNodes, updNodes, eatNodes, delNodes) {
    Object.assign(this, {
        playerTracker: playerTracker,
        addNodes: addNodes,
        updNodes: updNodes,
        eatNodes: eatNodes,
        delNodes: delNodes
    });
}
module.exports = UpdateNodes;

/**
 * Build the update payload for this tick and hand it to the player's
 * protocol implementation. The `protocol` argument is accepted for
 * interface compatibility but is not used here.
 *
 * Payload shape: { eat: [{killer, killed}], update: [cell records],
 * remove: [nodeIds] }. Eaten cells appear in both `eat` and `remove`.
 */
UpdateNodes.prototype.build = function (protocol) {
    // Convert a cell into an update record. Previously this logic was
    // duplicated verbatim for the "updated" and "added" loops; it is now
    // shared, with only the display name differing between the two cases.
    function makeRecord(node, name) {
        var color = node.getColor();
        var flags = 0;
        if (node.isSpiked)
            flags |= 0x01;  // isVirus
        if (node.isAgitated)
            flags |= 0x10;  // isAgitated
        if (node.cellType == 3)
            flags |= 0x20;  // isEjected
        return {
            id: node.nodeId,
            posX: Math.floor(node.position.x),
            posY: Math.floor(node.position.y),
            size: Math.floor(node.getSize()),
            r: color.r,
            g: color.g,
            b: color.b,
            flags: flags,
            name: name,
            skin: ''
        };
    }
    var obj = {
        eat: [],
        update: [],
        remove: []
    };
    // Eaten cells: record who ate whom (killer id 0 when unknown).
    for (var i = 0; i < this.eatNodes.length; i++) {
        var node = this.eatNodes[i];
        var killer = node.getKiller();
        obj.eat.push({
            killer: killer ? killer.nodeId : 0,
            killed: node.nodeId
        });
    }
    // Changed cells (no name sent for updates).
    for (var i = 0; i < this.updNodes.length; i++) {
        var node = this.updNodes[i];
        if (node.nodeId == 0)
            continue;
        obj.update.push(makeRecord(node, ''));
    }
    // Newly visible cells: include the owner's name when there is one.
    for (var i = 0; i < this.addNodes.length; i++) {
        var node = this.addNodes[i];
        if (node.nodeId == 0)
            continue;
        var cellName = node.owner ? node.owner.getNameUnicode() : null;
        obj.update.push(makeRecord(node, cellName || ''));
    }
    // Eaten cells are also reported as removed, followed by plain deletions.
    for (var i = 0; i < this.eatNodes.length; i++) {
        obj.remove.push(this.eatNodes[i].nodeId); // Cell ID
    }
    for (var i = 0; i < this.delNodes.length; i++) {
        obj.remove.push(this.delNodes[i].nodeId); // Cell ID
    }
    return this.playerTracker.protocol.update(obj);
};
|
package io.opensphere.kml.common.model;
/**
 * An interface for a KML controller. Implementations receive newly loaded
 * KML data and are told when a data source is removed.
 */
public interface KMLController
{
    /**
     * Adds data to the controller.
     *
     * @param dataEvent The data event carrying the loaded KML data
     * @param reload Flag indicating if this is a reload of an existing source
     */
    void addData(KMLDataEvent dataEvent, boolean reload);
    /**
     * Removes a data source from the controller.
     *
     * @param dataSource The data source whose data should be removed
     */
    void removeData(KMLDataSource dataSource);
}
|
#!/usr/bin/env bash
# This script is only relevant if you're rolling nixos.
# Esy (a bisect_ppx dependency/build tool) is borked on nixos without using an FHS shell. https://github.com/esy/esy/issues/858
# We need to patchelf rescript executables. https://github.com/NixOS/nixpkgs/issues/107375
set -x
fhsShellName="squiggle-development"
fhsShellDotNix="{pkgs ? import <nixpkgs> {} }: (pkgs.buildFHSUserEnv { name = \"${fhsShellName}\"; targetPkgs = pkgs: [pkgs.yarn]; runScript = \"yarn\"; }).env"
nix-shell - <<<"$fhsShellDotNix"
# Reuse the dynamic linker of a known-good system binary for the prebuilt executables.
theLd=$(patchelf --print-interpreter $(which mkdir))
patchelf --set-interpreter $theLd ./node_modules/gentype/gentype.exe
patchelf --set-interpreter $theLd ./node_modules/rescript/linux/*.exe
patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/ppx
# Fixed: path typo ("node_moduels" -> "node_modules") so bisect-ppx-report is actually patched.
patchelf --set-interpreter $theLd ./node_modules/bisect_ppx/bisect-ppx-report
# Point ninja at the libstdc++ provided by the FHS shell environment.
theSo=$(find /nix/store/*$fhsShellName*/lib64 -name libstdc++.so.6 | grep $fhsShellName | head -n 1)
patchelf --replace-needed libstdc++.so.6 $theSo ./node_modules/rescript/linux/ninja.exe
|
#!/usr/bin/env bash
###############################################################################
# Copyright 2020 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Fail on first error.
set -ex
cd "$(dirname "${BASH_SOURCE[0]}")"
. ./installer_base.sh
# NOTE(review): `shift` aborts the script (under set -e) when no geo argument
# is given — confirm callers always pass one.
MY_GEO=$1; shift
ARCH="$(uname -m)"
##----------------------------##
##  APT sources.list settings |
##----------------------------##
if [[ "${ARCH}" == "x86_64" ]]; then
    if [[ "${MY_GEO}" == "cn" ]]; then
        # this rcfile is not very stable for my local machine
        echo "using my local rcfile..."
        # cp -f "${RCFILES_DIR}/sources.list.cn.x86_64" /etc/apt/sources.list
        # sed -i 's/nvidia.com/nvidia.cn/g' /etc/apt/sources.list.d/nvidia-ml.list
    else
        sed -i 's/archive.ubuntu.com/us.archive.ubuntu.com/g' /etc/apt/sources.list
    fi
else # aarch64
    if [[ "${MY_GEO}" == "cn" ]]; then
        cp -f "${RCFILES_DIR}/sources.list.cn.aarch64" /etc/apt/sources.list
    fi
fi
# Disabled:
# apt-file
apt_get_update_and_install \
    apt-utils \
    bc \
    curl \
    file \
    gawk \
    git \
    less \
    lsof \
    python3 \
    python3-pip \
    python3-distutils \
    sed \
    software-properties-common \
    sudo \
    unzip \
    vim \
    wget \
    zip \
    xz-utils
if [[ "${ARCH}" == "aarch64" ]]; then
    apt-get -y install kmod
fi
# Build tooling is skipped for "runtime" stage images.
MY_STAGE=
if [[ -f /etc/apollo.conf ]]; then
    MY_STAGE="$(awk -F '=' '/^stage=/ {print $2}' /etc/apollo.conf 2>/dev/null)"
fi
if [[ "${MY_STAGE}" != "runtime" ]]; then
    apt_get_update_and_install \
        build-essential \
        autoconf \
        automake \
        gcc-7 \
        g++-7 \
        gdb \
        libtool \
        patch \
        pkg-config \
        python3-dev \
        libexpat1-dev \
        linux-libc-dev
    # Note(storypku):
    # Set the last two packages to manually installed:
    #   libexpat1-dev was required by python3-dev
    #   linux-libc-dev was required by bazel/clang/cuda/...
fi
##----------------##
##      SUDO      ##
##----------------##
# sed -i /etc/sudoers -re 's/^%sudo.*/%sudo ALL=(ALL:ALL) NOPASSWD: ALL/g'
##----------------##
##  default shell ##
##----------------##
# chsh -s /bin/bash
ln -s /bin/bash /bin/sh -f
##----------------##
## Python Setings |
##----------------##
update-alternatives --install /usr/bin/python python /usr/bin/python3 36
if [[ "${MY_GEO}" == "cn" ]]; then
    # configure tsinghua's pypi mirror for x86_64 and aarch64
    PYPI_MIRROR="https://pypi.tuna.tsinghua.edu.cn/simple"
    pip3_install -i "$PYPI_MIRROR" pip -U
    python3 -m pip config set global.index-url "$PYPI_MIRROR"
else
    pip3_install pip -U
fi
pip3_install -U setuptools
pip3_install -U wheel
# Kick down the ladder
apt-get -y autoremove python3-pip
# Clean up cache to reduce layer size.
apt-get clean
# Fixed: under `set -e` the former `[ ... ] && rm ...` chain made the whole
# script exit non-zero whenever IS_IN_DOCKER != "true"; use an explicit if.
if [[ "${IS_IN_DOCKER}" == "true" ]]; then
    rm -rf /var/lib/apt/lists/*
fi
|
// Godot web-export loader. The IIFE receives a namespace object
// (window.GDQUEST) as `LIB` and attaches its public entry points to it.
// `loadingControl:` and `mobileHandling:` are labeled blocks used purely
// for lexical scoping/grouping — they are not objects.
((LIB = {}) => {
  loadingControl: {
    // Set while the engine/package is loading; cleared on success or failure.
    // NOTE(review): written in three places but never read within this file —
    // confirm whether external code relies on it.
    let is_initializing = true;
    // these class names get added to document.body
    // css controls what is visible through that mechanism
    const StatusMode = {
      DONE: "mode-done",
      PROGRESS: "mode-progress",
      INDETERMINATE: "mode-indeterminate",
      NOTICE: "mode-notice",
    };
    const allModes = Object.values(StatusMode);
    let currentStatusMode;
    /**
     * Sets a status mode (class name) on the body element
     * @param {string} mode one of the `StatusMode`s
     * @returns
     */
    const setStatusMode = (mode = StatusMode.INDETERMINATE) => {
      if (currentStatusMode === mode) {
        return;
      }
      document.body.classList.remove(...allModes);
      if (allModes.includes(mode)) {
        document.body.classList.add(mode);
        currentStatusMode = mode;
      } else {
        throw new Error(`Invalid status mode : ${mode}`);
      }
    };
    /**
     * Shows an error to the user
     * @param {Error|string} err
     */
    const displayFailureNotice = (err) => {
      var msg = err.message || err;
      console.error(msg);
      setStatusMode(StatusMode.NOTICE);
      const statusNotice = document.getElementById("notices");
      msg.split("\n").forEach((line) => {
        statusNotice.appendChild(document.createTextNode(line));
        statusNotice.appendChild(document.createElement("br"));
      });
      is_initializing = false;
    };
    /**
     * Callback used during the loading of the engine and packages
     * @param {number} current the current amount of bytes loaded
     * @param {number} total the total amount of bytes loaded
     */
    const onProgress = (current, total) => {
      if (total > 0) {
        setStatusMode(StatusMode.PROGRESS);
        document
          .getElementById("loader")
          .style.setProperty("--progress", (current / total) * 100 + "%");
      } else {
        // Total size unknown: fall back to an indeterminate spinner.
        setStatusMode(StatusMode.INDETERMINATE);
      }
    };
    const onPackageLoaded = () => {
      setStatusMode(StatusMode.DONE);
      is_initializing = false;
    };
    /**
     * Instanciates the engine, starts the package
     * (GODOT_CONFIG and Engine are globals provided by the Godot export;
     * presumably defined before this script runs — TODO confirm.)
     */
    const load = () => {
      setStatusMode(StatusMode.INDETERMINATE);
      GODOT_CONFIG.canvasResizePolicy = 0;
      const engine = new Engine(GODOT_CONFIG);
      engine
        .startGame({ onProgress })
        .then(onPackageLoaded)
        .catch(displayFailureNotice);
    };
    /**
     * Entry point, this is run once the page loads
     */
    const start = () => {
      setStatusMode(StatusMode.INDETERMINATE);
      if (!Engine.isWebGLAvailable()) {
        displayFailureNotice("WebGL not available");
      } else {
        load();
      }
    };
    LIB.startLoading = start;
    LIB.displayFailureNotice = displayFailureNotice;
  }
  mobileHandling: {
    // Let mobile users dismiss the "not supported" warning and persist
    // that choice in localStorage across visits.
    const KEY = "force-mobile";
    const forceAppOnMobile = () => {
      document.body.classList.add(KEY);
      localStorage.setItem(KEY, "true");
    };
    document
      .getElementById("mobile-warning-dismiss-button")
      .addEventListener("click", forceAppOnMobile);
    const currentValue = JSON.parse(localStorage.getItem(KEY) || "false");
    if (currentValue === true) {
      forceAppOnMobile();
    }
  }
})((window.GDQUEST = {}));
GDQUEST.startLoading();
|
#!/bin/bash
# Load NBA stats for the current season into the local Postgres "nba" database
# via the nba_sql scraper; connection settings are passed as environment vars.
# NOTE(review): credentials are hard-coded here — consider sourcing them from
# the environment or a secrets file instead of committing them.
DB_NAME="nba" DB_HOST="localhost" DB_USER=nba_sql DB_PASSWORD=nba_sql python stats/nba_sql.py --current_season_mode --database="postgres"
|
<filename>src/main/java/mastermind/views/console/menu/command/OpenSavedGameCommand.java
package mastermind.views.console.menu.command;
import mastermind.controllers.StartController;
import mastermind.views.console.menu.SavedGameSelectMenu;
/** Menu command that opens the list of previously saved games. */
public class OpenSavedGameCommand extends Command {

    /**
     * @param startController controller shared with the parent menu
     */
    public OpenSavedGameCommand(StartController startController) {
        super(CommandTitle.OPEN_SAVED_GAME_COMMAND, startController);
    }

    /** This command can always be selected. */
    @Override
    public boolean isActive() {
        return true;
    }

    /** Delegates to a sub-menu where the user picks a saved game. */
    @Override
    public void execute() {
        StartController controller = (StartController) this.acceptorController;
        new SavedGameSelectMenu(controller).execute();
    }
}
|
"""
Create a deep learning network to predict the price of a stock given a set of features.
"""
import tensorflow as tf
def create_model(input_shape):
    """Build and compile a small dense regression network.

    Args:
        input_shape: Shape tuple of a single input sample (batch dim excluded).

    Returns:
        A compiled ``tf.keras.Model`` producing one scalar output, trained
        with Adam on mean squared error and tracking MAE/MSE metrics.
    """
    layers = tf.keras.layers
    model = tf.keras.models.Sequential()
    model.add(layers.Input(shape=input_shape))
    model.add(layers.Flatten())
    model.add(layers.Dense(128, activation='relu'))
    model.add(layers.Dropout(0.2))
    model.add(layers.Dense(1))
    model.compile(
        optimizer='adam',
        loss=tf.keras.losses.MeanSquaredError(),
        metrics=['mae', 'mse'],
    )
    return model
# Build a model for 5-feature input samples and print its layer summary.
model = create_model((5,))
model.summary()
|
def swap_key_value(d):
    """Return a new dict with keys and values exchanged.

    When several keys share the same value, only the last key
    (in iteration order) survives the swap.
    """
    return dict(zip(d.values(), d.keys()))
# Demo: swapping {'a': 1, 'b': 2, 'c': 3} prints {1: 'a', 2: 'b', 3: 'c'}.
d = {'a': 1, 'b': 2, 'c': 3}
result = swap_key_value(d)
print(result)
|
import requests
from bs4 import BeautifulSoup
def count_tags(url):
    """Fetch ``url`` and return the number of HTML tags in the response body.

    Args:
        url: Address to download.

    Returns:
        Total count of all tags found by BeautifulSoup's ``html.parser``.

    Raises:
        requests.RequestException: on connection failure or timeout.
    """
    # requests has no default timeout; without one a stalled server
    # would hang this call forever.
    r = requests.get(url, timeout=10)
    soup = BeautifulSoup(r.text, 'html.parser')
    count = len(soup.find_all())
    return count
|
<filename>spec/chewy/search/parameters/types_spec.rb<gh_stars>1-10
# Shared examples asserting string-array storage behaviour.
require 'chewy/search/parameters/string_array_storage_examples'
# Types parameter storage: behaves like a plain string-array storage
# whose default value is nil.
describe Chewy::Search::Parameters::Types do
  it_behaves_like :string_array_storage, nil
end
|
import java.util.ArrayList;
import java.util.List;
/** Demonstrates reversing a list by copying its elements back to front. */
public class ReverseList {
    public static void main(String[] args) {
        List<Integer> list = new ArrayList<>();
        list.add(1);
        list.add(3);
        list.add(7);
        list.add(10);
        System.out.println("Original list: " + list);
        // Walk the source list from its tail and append each element.
        List<Integer> reversedList = new ArrayList<>();
        int index = list.size();
        while (index-- > 0) {
            reversedList.add(list.get(index));
        }
        System.out.println("Reversed list: " + reversedList);
    }
}
|
#!/bin/bash
# Download the autoconf 2.69 source tarball from the GNU server.
# -f: fail on HTTP errors instead of silently saving an error page as the tarball.
# -L: follow redirects (GNU servers may redirect to mirrors).
# -O: keep the remote file name (autoconf-2.69.tar.gz).
curl -fLO https://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz
|
-- Migration: drop the gh_avatar column from the users table.
ALTER TABLE users DROP COLUMN gh_avatar;
|
class GameAgent:
    """Agent for a 2048-style sliding-tile game on a square grid.

    The tile-sliding mechanics (``move``, ``add_rand_num``,
    ``calculate_score``) are placeholders in this file; the surrounding
    plumbing is functional.
    """

    def __init__(self, grid_size, target_score):
        """
        Args:
            grid_size: Width/height of the square board.
            target_score: Score at which get_best_move stops searching.
        """
        self.grid_size = grid_size
        self.target_score = target_score
        # Original code called these helpers unqualified (NameError);
        # they are instance methods and need `self.`.
        self.reset_game_state(grid_size)
        self.game_state = self.setup()

    def reset_game_state(self, grid_size):
        """Clear the board to all zeros and reset the score."""
        self.board = [[0 for _ in range(grid_size)] for _ in range(grid_size)]
        self.score = 0

    def setup(self):
        """Seed roughly 20% of the cells with a random starting tile (2 or 4)."""
        import random  # local import: the file shows no top-level import block
        # add random numbers to starting grid
        for _ in range(int(0.2 * self.grid_size ** 2)):
            x = random.randint(0, self.grid_size - 1)
            y = random.randint(0, self.grid_size - 1)
            self.board[x][y] = random.choice([2, 4])

    def make_move(self, direction):
        """Apply a move, spawn a new random tile, and recompute the score."""
        self.score = 0
        self.board = self.move(self.board, direction)
        self.board = self.add_rand_num(self.board)
        self.score = self.calculate_score(self.board)

    # game logic
    def move(self, board, direction):
        """Slide/merge tiles in `direction`. Placeholder: board unchanged."""
        # ...
        return board

    def add_rand_num(self, board):
        """Spawn a random tile on `board`. Placeholder: board unchanged."""
        # ...
        return board

    def calculate_score(self, board):
        """Compute the score for `board`. Placeholder: returns 0.

        (The original returned an undefined name `score` — a NameError.)
        """
        # ...
        return 0

    def get_best_move(self):
        """One-ply greedy search: score each direction on a board copy and
        return the highest-scoring direction (None if all score 0)."""
        from copy import deepcopy  # local import: no top-level import block
        best_score = 0
        best_move = None
        for direction in ['up', 'down', 'left', 'right']:
            # Work on a copy so the real board is not mutated by the probe.
            board = deepcopy(self.board)
            score = self.calculate_score(self.move(board, direction))
            if score > best_score:
                best_score = score
                best_move = direction
            if self.score >= self.target_score:
                break
        return best_move
|
#include <stdio.h>
#include <math.h>
/* 3-component float vector. */
typedef struct {
    float x, y, z;
} vec3;

/* Result of a ray/plane query: hit distance plus the hit point. */
typedef struct {
    float distance;
    vec3 intersection_point;
} IntersectionResult;

/*
 * Intersect the ray orig + t*dir with the horizontal plane y = -4
 * (the checkerboard plane).  When the ray is (near-)parallel to the
 * plane (|dir.y| <= 1e-3), distance stays INFINITY and the point is
 * the zero vector.
 * NOTE(review): a negative t (plane behind the ray origin) is still
 * reported as a hit — confirm callers expect that.
 */
IntersectionResult calculateIntersection(vec3 orig, vec3 dir) {
    IntersectionResult hit = {
        .distance = INFINITY,
        .intersection_point = { 0.0f, 0.0f, 0.0f },
    };
    if (fabs(dir.y) <= 1e-3) {
        return hit; /* ray parallel to the plane: no finite hit */
    }
    float t = -(orig.y + 4) / dir.y; /* solve orig.y + t*dir.y == -4 */
    hit.distance = t;
    hit.intersection_point.x = orig.x + t * dir.x;
    hit.intersection_point.y = orig.y + t * dir.y;
    hit.intersection_point.z = orig.z + t * dir.z;
    return hit;
}
|
#!/bin/bash
# Transpile src/main.js with Babel into ../dist/main.gs, then push the
# project to Google Apps Script. Intended to be run from the src directory.
babel -o ../dist/main.gs main.js
# NOTE(review): `gapps upload` appears to expect the project root as its
# working directory — confirm against the gapps configuration.
cd ..
gapps upload
cd src
|
#!/bin/tcsh
# Rename every file in the current directory by stripping a trailing
# ".html", skipping shell scripts.
# Fix: the original `grep -v *.sh` left the glob unquoted, so the shell
# expanded *.sh into file names before grep ever saw a pattern.
foreach i (`ls | grep -v '\.sh$'`)
    echo $i
    # Everything before the first ".html" becomes the new name.
    set newname=`echo $i | awk -F '.html' '{print $1}'`
    echo $newname
    # Skip files without ".html": mv'ing a file onto itself is an error.
    if ("$newname" != "$i") then
        mv $i $newname
    endif
end
|
<reponame>xasopheno/chai-mocha-basics
(function() {
  if (!mocha) {
    // Fix: `Exception` is not a JavaScript built-in, so the original
    // `throw new Exception(...)` raised a ReferenceError instead of the
    // intended message. `Error` is the correct type to throw.
    throw new Error("mocha library does not exist in global namespace!");
  }
  /*
   * Mocha Events:
   *
   * - `start` execution started
   * - `end` execution complete
   * - `suite` (suite) test suite execution started
   * - `suite end` (suite) all tests (and sub-suites) have finished
   * - `test` (test) test execution started
   * - `test end` (test) test completed
   * - `hook` (hook) hook execution started
   * - `hook end` (hook) hook complete
   * - `pass` (test) test passed
   * - `fail` (test, err) test failed
   *
   */
  const OriginalReporter = mocha._reporter;
  /**
   * Reporter that forwards runner events to blanket's coverage hooks and
   * then delegates to the originally configured reporter.
   */
  const BlanketReporter = function(runner) {
    runner.on('start', function() {
      blanket.setupCoverage();
    });
    runner.on('end', function() {
      blanket.onTestsDone();
    });
    runner.on('suite', function() {
      blanket.onModuleStart();
    });
    runner.on('test', function() {
      blanket.onTestStart();
    });
    runner.on('test end', function(test) {
      blanket.onTestDone(test.parent.tests.length, test.state === 'passed');
    });
    // Hooks are tracked like tests for coverage bookkeeping.
    runner.on('hook', function() {
      blanket.onTestStart();
    });
    runner.on('hook end', function() {
      blanket.onTestsDone();
    });
    // NOTE: this is an instance of BlanketReporter
    new OriginalReporter(runner);
  };
  BlanketReporter.prototype = OriginalReporter.prototype;
  mocha.reporter(BlanketReporter);
  // Defer mocha.run until blanket has finished instrumenting sources;
  // the real run is triggered from blanket's callback below.
  const oldRun = mocha.run;
  let oldCallback = null;
  mocha.run = function(finishCallback) {
    oldCallback = finishCallback;
    console.log("waiting for blanket...");
  };
  blanket.beforeStartTestRunner({
    callback: function() {
      if (!blanket.options("existingRequireJS")) {
        oldRun(oldCallback);
      }
      mocha.run = oldRun;
    }
  });
})();
|
#pragma once
namespace UEAA
{
// Forward declaration; the full definition lives elsewhere in the project.
class DeHashTable;
/// Hashes a null-terminated C string.
/// NOTE(review): when `deHashTable` is non-null it is presumably used to
/// record the string for reverse (de-hash) lookup — confirm in the
/// implementation file.
unsigned CStringToHash (const char *cString, DeHashTable *deHashTable = 0);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.