prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
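Each row below pairs a FIM-formatted prompt with its completion: the prompt wraps a source file's prefix and suffix in <|file_name|>...<|end_file_name|><|fim▁begin|>...<|fim▁hole|>...<|fim▁end|> markers, and the completion holds the text that belongs at the hole (for the Solution.java row further down, that is `int start = 1;`). The sketch below shows how such a pair could be assembled; the helper name and dict layout are illustrative assumptions, not part of the dataset itself.

```python
# Sketch only: builds one (prompt, completion) row in the format previewed below.
# make_fim_row and its field layout are assumptions for illustration.

def make_fim_row(file_name: str, prefix: str, middle: str, suffix: str) -> dict:
    """Wrap a file split into prefix/middle/suffix as a FIM-style row."""
    prompt = (
        f"<|file_name|>{file_name}<|end_file_name|>"
        f"<|fim▁begin|>{prefix}<|fim▁hole|>{suffix}<|fim▁end|>"
    )
    # The completion is simply the text removed at the hole position.
    return {"prompt": prompt, "completion": middle}


# Example mirroring the Solution.java row in this preview:
row = make_fim_row(
    "Solution.java",
    prefix="int badVersion = 0;\n",
    middle="int start = 1;\n",
    suffix="int end = n;\n",
)
print(row["prompt"])
print(row["completion"])
```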
<|file_name|>pkcs11factory.go<|end_file_name|><|fim▁begin|>// +build pkcs11 /* Copyright IBM Corp. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package factory import ( "encoding/hex" "github.com/hyperledger/fabric/bccsp" "github.com/hyperledger/fabric/bccsp/pkcs11" "github.com/hyperledger/fabric/bccsp/sw" "github.com/pkg/errors" ) const ( // PKCS11BasedFactoryName is the name of the factory of the hsm-based BCCSP implementation PKCS11BasedFactoryName = "PKCS11" ) // PKCS11Factory is the factory of the HSM-based BCCSP. type PKCS11Factory struct{} // Name returns the name of this factory func (f *PKCS11Factory) Name() string { return PKCS11BasedFactoryName<|fim▁hole|>} // Get returns an instance of BCCSP using Opts. func (f *PKCS11Factory) Get(config *FactoryOpts) (bccsp.BCCSP, error) { // Validate arguments if config == nil || config.PKCS11 == nil { return nil, errors.New("Invalid config. It must not be nil.") } p11Opts := *config.PKCS11 ks := sw.NewDummyKeyStore() mapper := skiMapper(p11Opts) return pkcs11.New(p11Opts, ks, pkcs11.WithKeyMapper(mapper)) } func skiMapper(p11Opts pkcs11.PKCS11Opts) func([]byte) []byte { keyMap := map[string]string{} for _, k := range p11Opts.KeyIDs { keyMap[k.SKI] = k.ID } return func(ski []byte) []byte { keyID := hex.EncodeToString(ski) if id, ok := keyMap[keyID]; ok { return []byte(id) } if p11Opts.AltID != "" { return []byte(p11Opts.AltID) } return ski } }<|fim▁end|>
<|file_name|>init.rs<|end_file_name|><|fim▁begin|>#![feature(core, collections)] extern crate core; #[cfg(test)] mod tests { use core::slice::SliceExt; // fn size_from_ptr<T>(_: *const T) -> usize { // mem::size_of::<T>() // } // macro_rules! slice_offset { // ($ptr:expr, $by:expr) => {{ // let ptr = $ptr; // if size_from_ptr(ptr) == 0 { // ::intrinsics::arith_offset(ptr as *mut i8, $by) as *mut _ // } else { // ptr.offset($by) // } // }}; // } // macro_rules! slice_ref { // ($ptr:expr) => {{ // let ptr = $ptr; // if size_from_ptr(ptr) == 0 { // // Use a non-null pointer value // &mut *(1 as *mut _) // } else { // transmute(ptr) // } // }}; // } // pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] { // transmute(RawSlice { data: p, len: len }) // } // macro_rules! make_slice { // ($start: expr, $end: expr) => {{ // let start = $start; // let diff = ($end as usize).wrapping_sub(start as usize); // if size_from_ptr(start) == 0 { // // use a non-null pointer value // unsafe { from_raw_parts(1 as *const _, diff) } // } else { // let len = diff / size_from_ptr(start); // unsafe { from_raw_parts(start, len) } // } // }} // } // impl<T> SliceExt for [T] { // type Item = T; // // #[inline] // fn split_at(&self, mid: usize) -> (&[T], &[T]) { // (&self[..mid], &self[mid..]) // } // // #[inline] // fn iter<'a>(&'a self) -> Iter<'a, T> { // unsafe { // let p = if mem::size_of::<T>() == 0 { // 1 as *const _ // } else { // let p = self.as_ptr(); // assume(!p.is_null()); // p // }; // // Iter { // ptr: p, // end: slice_offset!(p, self.len() as isize), // _marker: marker::PhantomData // } // } // } // // #[inline] // fn split<'a, P>(&'a self, pred: P) -> Split<'a, T, P> where P: FnMut(&T) -> bool { // Split { // v: self, // pred: pred, // finished: false // } // } // // #[inline] // fn splitn<'a, P>(&'a self, n: usize, pred: P) -> SplitN<'a, T, P> where // P: FnMut(&T) -> bool, // { // SplitN { // inner: GenericSplitN { // iter: self.split(pred), // count: n, // invert: false // } // } // } // // #[inline] // fn rsplitn<'a, P>(&'a self, n: usize, pred: P) -> RSplitN<'a, T, P> where // P: FnMut(&T) -> bool, // { // RSplitN { // inner: GenericSplitN { // iter: self.split(pred), // count: n, // invert: true // } // } // } // // #[inline] // fn windows(&self, size: usize) -> Windows<T> { // assert!(size != 0); // Windows { v: self, size: size } // } // // #[inline] // fn chunks(&self, size: usize) -> Chunks<T> { // assert!(size != 0); // Chunks { v: self, size: size } // } // // #[inline] // fn get(&self, index: usize) -> Option<&T> { // if index < self.len() { Some(&self[index]) } else { None } // } // // #[inline] // fn first(&self) -> Option<&T> { // if self.is_empty() { None } else { Some(&self[0]) } // } // // #[inline] // fn tail(&self) -> &[T] { &self[1..] 
} // // #[inline] // fn init(&self) -> &[T] { // &self[..self.len() - 1] // } // // #[inline] // fn last(&self) -> Option<&T> { // if self.is_empty() { None } else { Some(&self[self.len() - 1]) } // } // // #[inline] // unsafe fn get_unchecked(&self, index: usize) -> &T { // transmute(self.repr().data.offset(index as isize)) // } // // #[inline] // fn as_ptr(&self) -> *const T { // self.repr().data // } // // #[unstable(feature = "core")] // fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize> where // F: FnMut(&T) -> Ordering // { // let mut base : usize = 0; // let mut lim : usize = self.len(); // // while lim != 0 { // let ix = base + (lim >> 1); // match f(&self[ix]) { // Equal => return Ok(ix), // Less => { // base = ix + 1; // lim -= 1; // } // Greater => () // } // lim >>= 1; // } // Err(base) // } // // #[inline] // fn len(&self) -> usize { self.repr().len } // // #[inline] // fn get_mut(&mut self, index: usize) -> Option<&mut T> { // if index < self.len() { Some(&mut self[index]) } else { None } // } // // #[inline] // fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) { // unsafe { // let self2: &mut [T] = mem::transmute_copy(&self); // // (ops::IndexMut::index_mut(self, ops::RangeTo { end: mid } ), // ops::IndexMut::index_mut(self2, ops::RangeFrom { start: mid } )) // } // } // // #[inline] // fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { // unsafe { // let p = if mem::size_of::<T>() == 0 { // 1 as *mut _ // } else { // let p = self.as_mut_ptr(); // assume(!p.is_null()); // p // }; // // IterMut { // ptr: p, // end: slice_offset!(p, self.len() as isize), // _marker: marker::PhantomData // } // } // } // // #[inline] // fn last_mut(&mut self) -> Option<&mut T> { // let len = self.len(); // if len == 0 { return None; } // Some(&mut self[len - 1]) // } // // #[inline] // fn first_mut(&mut self) -> Option<&mut T> { // if self.is_empty() { None } else { Some(&mut self[0]) } // } // // #[inline] // fn tail_mut(&mut self) -> &mut [T] { // &mut self[1 ..] // } // // #[inline] // fn init_mut(&mut self) -> &mut [T] { // let len = self.len(); // &mut self[.. (len - 1)] // } // // #[inline] // fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, T, P> where P: FnMut(&T) -> bool { // SplitMut { v: self, pred: pred, finished: false } // } // // #[inline] // fn splitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> SplitNMut<'a, T, P> where // P: FnMut(&T) -> bool // { // SplitNMut { // inner: GenericSplitN { // iter: self.split_mut(pred), // count: n, // invert: false // } // } // } // // #[inline] // fn rsplitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> RSplitNMut<'a, T, P> where // P: FnMut(&T) -> bool, // { // RSplitNMut { // inner: GenericSplitN { // iter: self.split_mut(pred), // count: n, // invert: true // } // } // } // // #[inline] // fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> { // assert!(chunk_size > 0); // ChunksMut { v: self, chunk_size: chunk_size } // } // // #[inline] // fn swap(&mut self, a: usize, b: usize) { // unsafe { // // Can't take two mutable loans from one vector, so instead just cast // // them to their raw pointers to do the swap // let pa: *mut T = &mut self[a]; // let pb: *mut T = &mut self[b]; // ptr::swap(pa, pb); // } // } // // fn reverse(&mut self) { // let mut i: usize = 0; // let ln = self.len(); // while i < ln / 2 { // // Unsafe swap to avoid the bounds check in safe swap. 
// unsafe { // let pa: *mut T = self.get_unchecked_mut(i); // let pb: *mut T = self.get_unchecked_mut(ln - i - 1); // ptr::swap(pa, pb); // } // i += 1; // } // } // // #[inline] // unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T { // transmute((self.repr().data as *mut T).offset(index as isize)) // } // // #[inline] // fn as_mut_ptr(&mut self) -> *mut T { // self.repr().data as *mut T // } // // #[inline] // fn position_elem(&self, x: &T) -> Option<usize> where T: PartialEq { // self.iter().position(|y| *x == *y) // } // // #[inline] // fn rposition_elem(&self, t: &T) -> Option<usize> where T: PartialEq { // self.iter().rposition(|x| *x == *t) // } // // #[inline] // fn contains(&self, x: &T) -> bool where T: PartialEq { // self.iter().any(|elt| *x == *elt) // } // // #[inline] // fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq { // let n = needle.len(); // self.len() >= n && needle == &self[..n] // } // // #[inline] // fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq { // let (m, n) = (self.len(), needle.len()); // m >= n && needle == &self[m-n..] // } // // #[unstable(feature = "core")] // fn binary_search(&self, x: &T) -> Result<usize, usize> where T: Ord { // self.binary_search_by(|p| p.cmp(x)) // } // // #[unstable(feature = "core")] // fn next_permutation(&mut self) -> bool where T: Ord { // // These cases only have 1 permutation each, so we can't do anything. // if self.len() < 2 { return false; } // // // Step 1: Identify the longest, rightmost weakly decreasing part of the vector // let mut i = self.len() - 1; // while i > 0 && self[i-1] >= self[i] { // i -= 1; // } // // // If that is the entire vector, this is the last-ordered permutation. // if i == 0 { // return false; // } // // // Step 2: Find the rightmost element larger than the pivot (i-1) // let mut j = self.len() - 1; // while j >= i && self[j] <= self[i-1] { // j -= 1;<|fim▁hole|> // // Step 3: Swap that element with the pivot // self.swap(j, i-1); // // // Step 4: Reverse the (previously) weakly decreasing part // self[i..].reverse(); // // true // } // // #[unstable(feature = "core")] // fn prev_permutation(&mut self) -> bool where T: Ord { // // These cases only have 1 permutation each, so we can't do anything. // if self.len() < 2 { return false; } // // // Step 1: Identify the longest, rightmost weakly increasing part of the vector // let mut i = self.len() - 1; // while i > 0 && self[i-1] <= self[i] { // i -= 1; // } // // // If that is the entire vector, this is the first-ordered permutation. // if i == 0 { // return false; // } // // // Step 2: Reverse the weakly increasing part // self[i..].reverse(); // // // Step 3: Find the rightmost element equal to or bigger than the pivot (i-1) // let mut j = self.len() - 1; // while j >= i && self[j-1] < self[i-1] { // j -= 1; // } // // // Step 4: Swap that element with the pivot // self.swap(i-1, j); // // true // } // // #[inline] // fn clone_from_slice(&mut self, src: &[T]) -> usize where T: Clone { // let min = cmp::min(self.len(), src.len()); // let dst = &mut self[.. min]; // let src = &src[.. min]; // for i in 0..min { // dst[i].clone_from(&src[i]); // } // min // } // } type T = i32; #[test] #[should_panic] fn init_test1() { let slice: &[T] = &[]; let _: &[T] = slice.init(); // panicked at 'arithmetic operation overflowed' } #[test] fn init_test2() { let slice: &[T] = &[11, 12, 12, 13, 14, 15, 16]; let init: &[T] = slice.init(); assert_eq!(init, &[11, 12, 12, 13, 14, 15]); } }<|fim▁end|>
// } //
<|file_name|>babylon.animation.ts<|end_file_name|><|fim▁begin|>module BABYLON { export class Animation { private _keys: Array<any>; private _offsetsCache = {}; private _highLimitsCache = {}; private _stopped = false; public _target; private _easingFunction: IEasingFunction; public targetPropertyPath: string[]; public currentFrame: number; public static CreateAndStartAnimation(name: string, mesh: AbstractMesh, tartgetProperty: string, framePerSecond: number, totalFrame: number, from: any, to: any, loopMode?: number) { var dataType = undefined; if (!isNaN(parseFloat(from)) && isFinite(from)) { dataType = Animation.ANIMATIONTYPE_FLOAT; } else if (from instanceof Quaternion) { dataType = Animation.ANIMATIONTYPE_QUATERNION; } else if (from instanceof Vector3) { dataType = Animation.ANIMATIONTYPE_VECTOR3; } else if (from instanceof Vector2) { dataType = Animation.ANIMATIONTYPE_VECTOR2; } else if (from instanceof Color3) { dataType = Animation.ANIMATIONTYPE_COLOR3; } if (dataType == undefined) { return null; } var animation = new Animation(name, tartgetProperty, framePerSecond, dataType, loopMode); var keys = []; keys.push({ frame: 0, value: from }); keys.push({ frame: totalFrame, value: to }); animation.setKeys(keys); mesh.animations.push(animation); return mesh.getScene().beginAnimation(mesh, 0, totalFrame,(animation.loopMode === 1)); } constructor(public name: string, public targetProperty: string, public framePerSecond: number, public dataType: number, public loopMode?: number) { this.targetPropertyPath = targetProperty.split("."); this.dataType = dataType; this.loopMode = loopMode === undefined ? Animation.ANIMATIONLOOPMODE_CYCLE : loopMode; } // Methods public isStopped(): boolean { return this._stopped; } public getKeys(): any[] { return this._keys; } public getEasingFunction() { return this._easingFunction; } public setEasingFunction(easingFunction: EasingFunction) { this._easingFunction = easingFunction; } public floatInterpolateFunction(startValue: number, endValue: number, gradient: number): number { return startValue + (endValue - startValue) * gradient; } public quaternionInterpolateFunction(startValue: Quaternion, endValue: Quaternion, gradient: number): Quaternion { return Quaternion.Slerp(startValue, endValue, gradient); } public vector3InterpolateFunction(startValue: Vector3, endValue: Vector3, gradient: number): Vector3 { return Vector3.Lerp(startValue, endValue, gradient); } public vector2InterpolateFunction(startValue: Vector2, endValue: Vector2, gradient: number): Vector2 { return Vector2.Lerp(startValue, endValue, gradient); } public color3InterpolateFunction(startValue: Color3, endValue: Color3, gradient: number): Color3 { return Color3.Lerp(startValue, endValue, gradient); } public matrixInterpolateFunction(startValue: Matrix, endValue: Matrix, gradient: number): Matrix { var startScale = new Vector3(0, 0, 0); var startRotation = new Quaternion(); var startTranslation = new Vector3(0, 0, 0); startValue.decompose(startScale, startRotation, startTranslation); var endScale = new Vector3(0, 0, 0); var endRotation = new Quaternion(); var endTranslation = new Vector3(0, 0, 0); endValue.decompose(endScale, endRotation, endTranslation); var resultScale = this.vector3InterpolateFunction(startScale, endScale, gradient); var resultRotation = this.quaternionInterpolateFunction(startRotation, endRotation, gradient); var resultTranslation = this.vector3InterpolateFunction(startTranslation, endTranslation, gradient); var result = Matrix.Compose(resultScale, resultRotation, 
resultTranslation); return result; } public clone(): Animation { var clone = new Animation(this.name, this.targetPropertyPath.join("."), this.framePerSecond, this.dataType, this.loopMode); clone.setKeys(this._keys); return clone; } public setKeys(values: Array<any>): void { this._keys = values.slice(0); this._offsetsCache = {}; this._highLimitsCache = {}; } private _getKeyValue(value: any): any { if (typeof value === "function") { return value(); } return value; } private _interpolate(currentFrame: number, repeatCount: number, loopMode: number, offsetValue?, highLimitValue?) { if (loopMode === Animation.ANIMATIONLOOPMODE_CONSTANT && repeatCount > 0) { return highLimitValue.clone ? highLimitValue.clone() : highLimitValue; } this.currentFrame = currentFrame; // Try to get a hash to find the right key var startKey = Math.max(0, Math.min(this._keys.length - 1, Math.floor(this._keys.length * (currentFrame - this._keys[0].frame) / (this._keys[this._keys.length - 1].frame - this._keys[0].frame)) - 1)); if (this._keys[startKey].frame >= currentFrame) { while (startKey - 1 >= 0 && this._keys[startKey].frame >= currentFrame) { startKey--; } } for (var key = startKey; key < this._keys.length ; key++) { if (this._keys[key + 1].frame >= currentFrame) { var startValue = this._getKeyValue(this._keys[key].value); var endValue = this._getKeyValue(this._keys[key + 1].value); // gradient : percent of currentFrame between the frame inf and the frame sup var gradient = (currentFrame - this._keys[key].frame) / (this._keys[key + 1].frame - this._keys[key].frame); // check for easingFunction and correction of gradient if (this._easingFunction != null) { gradient = this._easingFunction.ease(gradient); } switch (this.dataType) { // Float case Animation.ANIMATIONTYPE_FLOAT: switch (loopMode) { case Animation.ANIMATIONLOOPMODE_CYCLE: case Animation.ANIMATIONLOOPMODE_CONSTANT: return this.floatInterpolateFunction(startValue, endValue, gradient); case Animation.ANIMATIONLOOPMODE_RELATIVE: return offsetValue * repeatCount + this.floatInterpolateFunction(startValue, endValue, gradient); } break; // Quaternion case Animation.ANIMATIONTYPE_QUATERNION: var quaternion = null; switch (loopMode) { case Animation.ANIMATIONLOOPMODE_CYCLE: case Animation.ANIMATIONLOOPMODE_CONSTANT: quaternion = this.quaternionInterpolateFunction(startValue, endValue, gradient); break; case Animation.ANIMATIONLOOPMODE_RELATIVE: quaternion = this.quaternionInterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount)); break; } return quaternion; // Vector3 case Animation.ANIMATIONTYPE_VECTOR3: switch (loopMode) { case Animation.ANIMATIONLOOPMODE_CYCLE: case Animation.ANIMATIONLOOPMODE_CONSTANT: return this.vector3InterpolateFunction(startValue, endValue, gradient); case Animation.ANIMATIONLOOPMODE_RELATIVE: return this.vector3InterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount)); } // Vector2 case Animation.ANIMATIONTYPE_VECTOR2: switch (loopMode) { case Animation.ANIMATIONLOOPMODE_CYCLE: case Animation.ANIMATIONLOOPMODE_CONSTANT: return this.vector2InterpolateFunction(startValue, endValue, gradient); case Animation.ANIMATIONLOOPMODE_RELATIVE: return this.vector2InterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount)); } // Color3 case Animation.ANIMATIONTYPE_COLOR3: switch (loopMode) { case Animation.ANIMATIONLOOPMODE_CYCLE: case Animation.ANIMATIONLOOPMODE_CONSTANT: return this.color3InterpolateFunction(startValue, endValue, gradient); case 
Animation.ANIMATIONLOOPMODE_RELATIVE: return this.color3InterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount)); } // Matrix case Animation.ANIMATIONTYPE_MATRIX: switch (loopMode) { case Animation.ANIMATIONLOOPMODE_CYCLE: case Animation.ANIMATIONLOOPMODE_CONSTANT: // return this.matrixInterpolateFunction(startValue, endValue, gradient); case Animation.ANIMATIONLOOPMODE_RELATIVE: return startValue; } default: break; } break; } } return this._getKeyValue(this._keys[this._keys.length - 1].value); } public animate(delay: number, from: number, to: number, loop: boolean, speedRatio: number): boolean { if (!this.targetPropertyPath || this.targetPropertyPath.length < 1) { this._stopped = true; return false; } var returnValue = true; // Adding a start key at frame 0 if missing if (this._keys[0].frame !== 0) { var newKey = { frame: 0, value: this._keys[0].value }; this._keys.splice(0, 0, newKey); } // Check limits if (from < this._keys[0].frame || from > this._keys[this._keys.length - 1].frame) { from = this._keys[0].frame; } if (to < this._keys[0].frame || to > this._keys[this._keys.length - 1].frame) { to = this._keys[this._keys.length - 1].frame; } // Compute ratio var range = to - from; var offsetValue; // ratio represents the frame delta between from and to var ratio = delay * (this.framePerSecond * speedRatio) / 1000.0; var highLimitValue = 0; if (ratio > range && !loop) { // If we are out of range and not looping get back to caller returnValue = false; highLimitValue = this._getKeyValue(this._keys[this._keys.length - 1].value); } else { // Get max value if required if (this.loopMode !== Animation.ANIMATIONLOOPMODE_CYCLE) { var keyOffset = to.toString() + from.toString(); if (!this._offsetsCache[keyOffset]) { var fromValue = this._interpolate(from, 0, Animation.ANIMATIONLOOPMODE_CYCLE); var toValue = this._interpolate(to, 0, Animation.ANIMATIONLOOPMODE_CYCLE); switch (this.dataType) { // Float case Animation.ANIMATIONTYPE_FLOAT: this._offsetsCache[keyOffset] = toValue - fromValue; break; // Quaternion case Animation.ANIMATIONTYPE_QUATERNION: this._offsetsCache[keyOffset] = toValue.subtract(fromValue); break; // Vector3 case Animation.ANIMATIONTYPE_VECTOR3: this._offsetsCache[keyOffset] = toValue.subtract(fromValue); // Vector2 case Animation.ANIMATIONTYPE_VECTOR2: this._offsetsCache[keyOffset] = toValue.subtract(fromValue); // Color3 case Animation.ANIMATIONTYPE_COLOR3: this._offsetsCache[keyOffset] = toValue.subtract(fromValue); default: break; } this._highLimitsCache[keyOffset] = toValue; } highLimitValue = this._highLimitsCache[keyOffset]; offsetValue = this._offsetsCache[keyOffset]; } } if (offsetValue === undefined) { switch (this.dataType) { // Float case Animation.ANIMATIONTYPE_FLOAT: offsetValue = 0; break; // Quaternion case Animation.ANIMATIONTYPE_QUATERNION: offsetValue = new Quaternion(0, 0, 0, 0); break; // Vector3 case Animation.ANIMATIONTYPE_VECTOR3: offsetValue = Vector3.Zero(); break; // Vector2 case Animation.ANIMATIONTYPE_VECTOR2: offsetValue = Vector2.Zero(); break; // Color3 case Animation.ANIMATIONTYPE_COLOR3: offsetValue = Color3.Black(); } } // Compute value var repeatCount = (ratio / range) >> 0; var currentFrame = returnValue ? 
from + ratio % range : to; var currentValue = this._interpolate(currentFrame, repeatCount, this.loopMode, offsetValue, highLimitValue); // Set value if (this.targetPropertyPath.length > 1) { var property = this._target[this.targetPropertyPath[0]]; for (var index = 1; index < this.targetPropertyPath.length - 1; index++) { property = property[this.targetPropertyPath[index]]; } property[this.targetPropertyPath[this.targetPropertyPath.length - 1]] = currentValue; } else { this._target[this.targetPropertyPath[0]] = currentValue; } if (this._target.markAsDirty) { this._target.markAsDirty(this.targetProperty); } if (!returnValue) { this._stopped = true; } return returnValue; } // Statics private static _ANIMATIONTYPE_FLOAT = 0; private static _ANIMATIONTYPE_VECTOR3 = 1; private static _ANIMATIONTYPE_QUATERNION = 2; private static _ANIMATIONTYPE_MATRIX = 3; private static _ANIMATIONTYPE_COLOR3 = 4; private static _ANIMATIONTYPE_VECTOR2 = 5; private static _ANIMATIONLOOPMODE_RELATIVE = 0; private static _ANIMATIONLOOPMODE_CYCLE = 1; private static _ANIMATIONLOOPMODE_CONSTANT = 2; public static get ANIMATIONTYPE_FLOAT(): number { return Animation._ANIMATIONTYPE_FLOAT; } public static get ANIMATIONTYPE_VECTOR3(): number { return Animation._ANIMATIONTYPE_VECTOR3; } public static get ANIMATIONTYPE_VECTOR2(): number { return Animation._ANIMATIONTYPE_VECTOR2; } public static get ANIMATIONTYPE_QUATERNION(): number { return Animation._ANIMATIONTYPE_QUATERNION; }<|fim▁hole|> public static get ANIMATIONTYPE_MATRIX(): number { return Animation._ANIMATIONTYPE_MATRIX; } public static get ANIMATIONTYPE_COLOR3(): number { return Animation._ANIMATIONTYPE_COLOR3; } public static get ANIMATIONLOOPMODE_RELATIVE(): number { return Animation._ANIMATIONLOOPMODE_RELATIVE; } public static get ANIMATIONLOOPMODE_CYCLE(): number { return Animation._ANIMATIONLOOPMODE_CYCLE; } public static get ANIMATIONLOOPMODE_CONSTANT(): number { return Animation._ANIMATIONLOOPMODE_CONSTANT; } } }<|fim▁end|>
<|file_name|>Solution.java<|end_file_name|><|fim▁begin|>package com.mgireesh; public class Solution extends VersionControl { public int firstBadVersion(int n) { int badVersion = 0;<|fim▁hole|> int end = n; while (start < end) { int mid = start + (end - start) / 2; if (isBadVersion(mid)) { end = mid; } else { start = mid + 1; } } return start; } }<|fim▁end|>
int start = 1;
<|file_name|>tagger.js<|end_file_name|><|fim▁begin|>let BASE_DIR = '/masn01-archive/'; const TAG_OPTIONS = ['meteor', 'cloud', 'bug', 'misc']; let CURR_DIR = null; let CURR_FILES = null; let INIT_CMAP = null; let CURR_IDX = 0; let PREV_IDX = null; $(async function() { let cameras = JSON.parse(await $.get('cameras.php')); cameras.forEach((camera) => { $('#masn-switch').append(`<option value='${camera}/'>${camera}</option>`); }); BASE_DIR = $('#masn-switch').val(); JS9.ResizeDisplay(750, 750); TAG_OPTIONS.forEach(tag => $('#tag-select').append(`<option value='${tag}'>${tag}</option>`)); $('#datepicker').prop('disabled', true); let result = await $.get(BASE_DIR); let years = getDirectories(result, /\d{4}/); console.log(years); new Pikaday({ field: document.getElementById('datepicker'), format: 'YYYY-MM-DD', minDate: moment(`${years[0]}-01-01`, 'YYYY-MM-DD').toDate(), maxDate: moment(`${years[years.length-1]}-12-31`, 'YYYY-MM-DD').toDate(), defaultDate: moment(`2018-11-20`).toDate(), onSelect: renderDate, onDraw: async function(evt) { let { year, month } = evt.calendars[0]; let { tabs, days } = await $.get(`stats.php?y=${year}&m=${String(month + 1).padStart(2, '0')}`); let renderedDays = $('.pika-lendar tbody td').filter('[data-day]'); renderedDays.each((_, elem) => { let dateStr = moment({ day: $(elem).data('day'), month: month, year: year }).format('YYYY-MM-DD'); if (days.indexOf(dateStr) !== -1) { let dateTab = tabs[days.indexOf(dateStr)]; $(elem).attr('data-tab', dateTab); if (0 <= dateTab && dateTab < POOR_LIM) $(elem).addClass('day-poor'); else if (POOR_LIM <= dateTab && dateTab < MEDIUM_LIM) $(elem).addClass('day-medium'); else if (MEDIUM_LIM <= dateTab && dateTab < GOOD_LIM) $(elem).addClass('day-good'); } }); } }); $('#datepicker').prop('disabled', false); $('#fileprev').click(function() { if (CURR_FILES == null) return; CURR_IDX = CURR_IDX - 1 < 0 ? CURR_FILES.length - 1 : CURR_IDX - 1; $('#slider').slider('value', CURR_IDX + 1); renderCurrentFile(); }); $('#filenext').click(function() { if (CURR_FILES == null) return; CURR_IDX = CURR_IDX + 1 >= CURR_FILES.length - 1 ? 
0 : CURR_IDX + 1; $('#slider').slider('value', CURR_IDX + 1); renderCurrentFile(); }); $('#action-tag').click(function() { let selectedRegions = JS9.GetRegions('selected'); if (selectedRegions.length === 1) { $('#tag-select')[0].selectedIndex = 0; $('#tag-modal').show(); } else if (selectedRegions.length > 1) { alert('Please select only one region.'); } else { alert('Please select a region.'); } }); $('#tag-select').change(function(evt) { let tag = $(this).val(); if (tag.trim() != '') { JS9.ChangeRegions('selected', { text: tag, data: { tag: tag } }); saveCurrentRegions(); } $('#tag-modal').hide(); }); $('#action-reset').click(function() { if (INIT_CMAP == null) return; JS9.SetColormap(INIT_CMAP.colormap, INIT_CMAP.contrast, INIT_CMAP.bias); }); $('#action-save').click(function() { saveCurrentRegions(); alert('All changes saved.'); }); $('#action-info').click(function() { $('#info-modal').show(); }); $('.modal-close').click(function() { $('.modal').hide(); }); $(window).keydown(function(evt) { if (evt.which === 8 && JS9.GetImageData(true)) saveCurrentRegions(); if (evt.which === 27) $('.modal').hide(); }); }); function createSlider() { let handle = $('#fits-handle'); handle.text(1); $('#slider').slider({ value: 1, min: 1, max: CURR_FILES.length, change: function(evt, ui) { handle.text(ui.value); CURR_IDX = ui.value - 1; renderCurrentFile(); }, slide: function(evt, ui) { handle.text(ui.value); } }); } function getDirectories(html, regex) { let parser = new DOMParser(); let root = parser.parseFromString(html, 'text/html'); let links = [].slice.call(root.getElementsByTagName('a')); let hrefs = links.map(link => { let directory = link.href.endsWith('/'); let dest = (directory ? link.href.slice(0, -1) : link.href).split('/').pop(); return dest.match(regex) ? 
dest : null; }).filter(e => e != null); return hrefs; } function renderCurrentFile() { if (PREV_IDX == CURR_IDX) return; if (CURR_FILES == null) return; PREV_IDX = CURR_IDX; let currentFile = CURR_FILES[CURR_IDX]; let currPath = `${CURR_DIR}/${currentFile}`; JS9.CloseImage(); PREV_ZOOM = null; PREV_PAN = null; $('.JS9PluginContainer').each((idx, elem) => { if($(elem).find('.tag-toggle, #tag-overlay').length === 0) { $(elem).append(`<div class='tag-toggle'></div>`); } }); JS9.globalOpts.menuBar = ['scale']; JS9.globalOpts.toolBar = ['box', 'circle', 'ellipse', 'zoom+', 'zoom-', 'zoomtofit']; JS9.SetToolbar('init'); JS9.Load(currPath, { zoom: 'ToFit', onload: async function() { let fileData = JSON.parse(await $.get({ url: 'regions.php', cache: false<|fim▁hole|> })); if (Object.keys(fileData).length > 0) { fileData.params = JSON.parse(fileData.params); fileData.params.map(region => { if (region.data.tag) region.text = region.data.tag; return region; }); JS9.AddRegions(fileData.params); } JS9.SetZoom('ToFit'); if (JS9.GetFlip() === 'none') JS9.SetFlip('x'); CENTER_PAN = JS9.GetPan(); INIT_CMAP = JS9.GetColormap(); console.log(CENTER_PAN); $('#viewer-container').show(); $('#actions').show(); $('#filename').text(`${currentFile} (${CURR_IDX + 1}/${CURR_FILES.length})`); $('#filetime').show(); updateSkymap(currentFile); } }); } async function renderDate(date) { $('#filename').text('Loading...'); let dateStr = moment(date).format('YYYY-MM-DD'); let yearDir = dateStr.substring(0, 4); let monthDir = dateStr.substring(0, 7); let parentDir = `${BASE_DIR}${yearDir}/${monthDir}/${dateStr}` let list; try { list = await $.get(parentDir); } catch (error) { list = null; } let entries = getDirectories(list, /\.fits?/); console.log(entries); PREV_IDX = null; CURR_IDX = 0; CURR_DIR = parentDir; CURR_FILES = entries; if (list) { $('#skytab').show().attr('src', `${parentDir}/sky.tab.thumb.png`); createSlider(); renderCurrentFile(); } else { $('#skytab').hide(); $('#filename').text('No data.'); $('#filetime').hide(); $('#viewer-container').hide(); $('#actions').hide(); } } function saveCurrentRegions() { let regions = JS9.GetRegions('all'); let tags = JS9.GetRegions('all').map(region => region.data ? region.data.tag : null).filter(tag => tag != null); $.get({ url: 'regions.php', cache: false }, { action: 'update', path: CURR_FILES[CURR_IDX], tags: tags.join(','), params: JSON.stringify(regions) }).then(response => { if (response.trim() !== '') { alert(`Error saving regions: ${response}`); } }); }<|fim▁end|>
}, { action: 'list', path: currentFile
<|file_name|>TimezoneModel.js<|end_file_name|><|fim▁begin|>Ext.define('Onlineshopping.onlineshopping.shared.shop.model.location.TimezoneModel', { "extend": "Ext.data.Model", "fields": [{ "name": "primaryKey", "type": "string", "defaultValue": "" }, { "name": "timeZoneId", "type": "string", "defaultValue": "" }, { "name": "utcdifference", "type": "int", "defaultValue": "" }, { "name": "gmtLabel", "type": "string", "defaultValue": "" }, { "name": "timeZoneLabel", "type": "string", "defaultValue": "" }, { "name": "country", "type": "string", "defaultValue": "" }, {<|fim▁hole|> "name": "versionId", "type": "int", "defaultValue": "" }, { "name": "entityAudit", "reference": "EntityAudit" }, { "name": "primaryDisplay", "type": "string", "defaultValue": "" }] });<|fim▁end|>
"name": "cities", "type": "string", "defaultValue": "" }, {
<|file_name|>test_abstract.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals try: import unittest2 as unittest except ImportError: import unittest from rpaths import unicode, PY3, AbstractPath, PosixPath, WindowsPath class TestAbstract(unittest.TestCase): def test_construct(self): """Tests building an AbstractPath.""" with self.assertRaises(RuntimeError): AbstractPath('path/to/something') <|fim▁hole|> """Tests building paths.""" self.assertEqual(WindowsPath('C:\\', WindowsPath('some/dir'), 'with', 'files.txt').path, 'C:\\some\\dir\\with\\files.txt') with self.assertRaises(TypeError): WindowsPath(WindowsPath('C:\\somedir'), PosixPath('file.sh')) self.assertEqual((WindowsPath('Users\\R\xE9mi/Desktop') / WindowsPath(b'pictures/m\xE9chant.jpg')).path, 'Users\\R\xE9mi\\Desktop\\pictures\\m\xE9chant.jpg') self.assertEqual((WindowsPath('C:\\dir') / WindowsPath('D:\\other')).path, 'D:\\other') def test_plus(self): """Tests the plus operator.""" self.assertEqual((WindowsPath('some\\file.txt') + '.bak').path, 'some\\file.txt.bak') with self.assertRaises(TypeError): WindowsPath('some\\file.txt') + WindowsPath('.bak') with self.assertRaises(ValueError): WindowsPath('some\\file.txt') + '.bak/kidding' with self.assertRaises(ValueError): WindowsPath('some\\file.txt') + '/backup' def test_str(self): """Tests getting string representations (repr/bytes/unicode).""" latin = WindowsPath('C:\\r\xE9mi') nonlatin = WindowsPath('C:\\you like\u203D.txt') # repr() self.assertEqual(repr(latin), "WindowsPath(u'C:\\\\r\\xe9mi')") self.assertEqual(repr(nonlatin), "WindowsPath(u'C:\\\\you like\\u203d.txt')") # bytes() self.assertEqual(bytes(latin), b'C:\\r\xe9mi') self.assertEqual(bytes(nonlatin), b'C:\\you like?.txt') # unicode() self.assertEqual(unicode(latin), 'C:\\r\xe9mi') self.assertEqual(unicode(nonlatin), 'C:\\you like\u203d.txt') def test_parts(self): """Tests parent, ancestor, name, stem, ext.""" relative = WindowsPath('directory/users\\r\xE9mi/file.txt') absolute = WindowsPath('\\some/other\\thing.h\xE9h\xE9') self.assertEqual(relative.parent.path, 'directory\\users\\r\xE9mi') self.assertEqual(absolute.parent.path, '\\some\\other') self.assertEqual(absolute.ancestor(10).path, '\\') self.assertEqual(relative.name, 'file.txt') self.assertEqual(absolute.name, 'thing.h\xE9h\xE9') self.assertEqual(absolute.unicodename, 'thing.h\xE9h\xE9') self.assertEqual(absolute.stem, 'thing') self.assertEqual(absolute.ext, '.h\xE9h\xE9') self.assertEqual(relative._components(), ['directory', 'users', 'r\xE9mi', 'file.txt']) self.assertEqual(absolute._components(), ['\\', 'some', 'other', 'thing.h\xE9h\xE9']) def test_root(self): """Tests roots, drives and UNC shares.""" a = WindowsPath(b'some/relative/path') b = WindowsPath('alsorelative') c = WindowsPath(b'/this/is/absolute') d = WindowsPath('C:\\') e = WindowsPath(b'C:\\also/absolute') f = WindowsPath('\\\\SOMEMACHINE\\share\\some\\file') def split_root(f): return tuple(p.path for p in f.split_root()) self.assertEqual(split_root(a), ('.', 'some\\relative\\path')) self.assertEqual(split_root(b), ('.', 'alsorelative')) self.assertFalse(b.is_absolute) self.assertEqual(split_root(c), ('\\', 'this\\is\\absolute')) self.assertTrue(c.is_absolute) self.assertEqual(split_root(d), ('C:\\', '.')) self.assertTrue(d.is_absolute) self.assertEqual(d.root.path, 'C:\\') self.assertEqual(split_root(e), ('C:\\', 'also\\absolute')) # FIXME : normpath() doesn't behave consistently: puts \ at the end on # PY3, not on PY2. 
self.assertIn(split_root(f), [('\\\\SOMEMACHINE\\share', 'some\\file'), ('\\\\SOMEMACHINE\\share\\', 'some\\file')]) def test_rel_path_to(self): """Tests the rel_path_to method.""" self.assertEqual(WindowsPath('.').rel_path_to(WindowsPath('')).path, '.') self.assertEqual(WindowsPath('\\var\\log\\apache2\\').rel_path_to( '\\var\\www\\cat.jpg').path, '..\\..\\www\\cat.jpg') self.assertEqual(WindowsPath('C:\\var\\log\\apache2\\').rel_path_to( 'C:\\tmp\\access.log').path, '..\\..\\..\\tmp\\access.log') self.assertEqual(WindowsPath('var\\log').rel_path_to( 'var\\log\\apache2\\access.log').path, 'apache2\\access.log') self.assertEqual(WindowsPath('\\var\\log\\apache2').rel_path_to( '\\var\\log\\apache2').path, '.') self.assertEqual(WindowsPath('C:\\').rel_path_to( 'C:\\var\\log\\apache2\\access.log').path, 'var\\log\\apache2\\access.log') self.assertEqual(WindowsPath('\\tmp\\secretdir\\').rel_path_to( '\\').path, '..\\..') self.assertEqual(WindowsPath('C:\\tmp\\secretdir\\').rel_path_to( 'D:\\other\\file.txt').path, 'D:\\other\\file.txt') with self.assertRaises(TypeError): WindowsPath('C:\\mydir\\').rel_path_to(PosixPath('/tmp/file')) def test_lies_under(self): """Tests the lies_under method.""" self.assertTrue(WindowsPath('\\tmp') .lies_under('\\')) self.assertFalse(WindowsPath('C:\\tmp') .lies_under('C:\\var')) self.assertFalse(WindowsPath('\\tmp') .lies_under('C:\\tmp')) self.assertFalse(WindowsPath('C:\\') .lies_under('D:\\tmp')) self.assertTrue(WindowsPath('\\tmp\\some\\file\\here') .lies_under('\\tmp\\some')) self.assertFalse(WindowsPath('\\tmp\\some\\file\\here') .lies_under('\\tmp\\no')) self.assertFalse(WindowsPath('C:\\tmp\\some\\file\\here') .lies_under('C:\\no\\tmp\\some')) self.assertFalse(WindowsPath('\\tmp\\some\\file\\here') .lies_under('\\no\\some')) self.assertTrue(WindowsPath('C:\\tmp\\some\\file\\here') .lies_under('C:\\tmp\\some\\file\\here')) self.assertTrue(WindowsPath('\\') .lies_under('\\')) self.assertTrue(WindowsPath('') .lies_under('')) self.assertTrue(WindowsPath('test') .lies_under('')) self.assertFalse(WindowsPath('') .lies_under('test')) self.assertFalse(WindowsPath('test') .lies_under('\\')) def test_comparisons(self): """Tests the comparison operators.""" self.assertTrue(WindowsPath('\\tmp') == WindowsPath('\\tmp')) self.assertFalse(WindowsPath('C:\\file') != 'c:\\FILE') self.assertTrue('c:\\FILE' == WindowsPath('C:\\file')) self.assertFalse(WindowsPath('C:\\file') == WindowsPath('C:\\dir')) self.assertFalse(WindowsPath('some/file') == PosixPath('some/file')) self.assertTrue(WindowsPath('path/to/file1') < 'path/to/file2') self.assertFalse('path/to/file1' >= WindowsPath('path/to/file2')) if PY3: with self.assertRaises(TypeError): WindowsPath('some/file') < PosixPath('other/file') class TestPosix(unittest.TestCase): """Tests for PosixPath. 
""" def test_construct(self): """Tests building paths.""" self.assertEqual(PosixPath('/', PosixPath(b'r\xE9mis/dir'), 'with', 'files.txt').path, b'/r\xE9mis/dir/with/files.txt') with self.assertRaises(TypeError): PosixPath('/tmp/test', WindowsPath('folder'), 'cat.gif') self.assertEqual((PosixPath(b'/tmp/dir') / PosixPath('r\xE9mis/files/')).path, b'/tmp/dir/r\xC3\xA9mis/files') if PY3: self.assertEqual(PosixPath('/tmp/r\uDCE9mi').path, b'/tmp/r\xE9mi') self.assertEqual((PosixPath(b'/home/test') / PosixPath('/var/log')).path, b'/var/log') def test_plus(self): """Tests the plus operator.""" self.assertEqual((PosixPath('some/file.txt') + '.bak').path, b'some/file.txt.bak') with self.assertRaises(TypeError): PosixPath('some/file.txt') + PosixPath('.bak') with self.assertRaises(ValueError): PosixPath('some/file.txt') + '.bak/kidding' with self.assertRaises(ValueError): PosixPath('some/file.txt') + '/backup' def test_str(self): """Tests getting string representations (repr/bytes/unicode).""" utf = PosixPath(b'/tmp/r\xC3\xA9mi') nonutf = PosixPath(b'/tmp/r\xE9mi') # repr() self.assertEqual(repr(utf), "PosixPath(b'/tmp/r\\xc3\\xa9mi')") self.assertEqual(repr(nonutf), "PosixPath(b'/tmp/r\\xe9mi')") # bytes() self.assertEqual(bytes(utf), b'/tmp/r\xC3\xA9mi') self.assertEqual(bytes(nonutf), b'/tmp/r\xE9mi') # unicode() self.assertEqual(unicode(utf), '/tmp/r\xE9mi') self.assertEqual(unicode(nonutf), '/tmp/r\uDCE9mi' if PY3 else '/tmp/r\uFFFDmi') def test_parts(self): """Tests parent, ancestor, name, stem, ext.""" relative = PosixPath(b'directory/users/r\xE9mi/file.txt') absolute = PosixPath('/some/other/thing.h\xE9h\xE9') self.assertEqual(relative.parent.path, b'directory/users/r\xE9mi') self.assertEqual(absolute.parent.path, b'/some/other') self.assertEqual(absolute.ancestor(10).path, b'/') self.assertEqual(relative.name, b'file.txt') self.assertEqual(absolute.name, b'thing.h\xC3\xA9h\xC3\xA9') self.assertEqual(absolute.unicodename, 'thing.h\xE9h\xE9') self.assertEqual(absolute.stem, b'thing') self.assertEqual(absolute.ext, b'.h\xC3\xA9h\xC3\xA9') self.assertEqual(relative._components(), [b'directory', b'users', b'r\xE9mi', b'file.txt']) self.assertEqual(absolute._components(), [b'/', b'some', b'other', b'thing.h\xC3\xA9h\xC3\xA9']) def test_root(self): """Tests roots.""" a = PosixPath(b'some/relative/path') b = PosixPath('alsorelative') c = PosixPath(b'/this/is/absolute') d = PosixPath('/') def split_root(f): return tuple(p.path for p in f.split_root()) # FIXME : This behaves weirdly because of normpath(). Do we want this? 
self.assertEqual(split_root(a), (b'.', b'some/relative/path')) self.assertEqual(split_root(b), (b'.', b'alsorelative')) self.assertFalse(b.is_absolute) self.assertEqual(split_root(c), (b'/', b'this/is/absolute')) self.assertTrue(c.is_absolute) self.assertEqual(split_root(d), (b'/', b'.')) self.assertTrue(d.is_absolute) self.assertEqual(d.root.path, b'/') def test_rel_path_to(self): """Tests the rel_path_to method.""" self.assertEqual(PosixPath('.').rel_path_to(PosixPath('')).path, b'.') self.assertEqual(PosixPath(b'/var/log/apache2/').rel_path_to( b'/var/www/cat.jpg').path, b'../../www/cat.jpg') self.assertEqual(PosixPath(b'/var/log/apache2/').rel_path_to( b'/tmp/access.log').path, b'../../../tmp/access.log') self.assertEqual(PosixPath(b'var/log').rel_path_to( b'var/log/apache2/access.log').path, b'apache2/access.log') self.assertEqual(PosixPath(b'/var/log/apache2').rel_path_to( b'/var/log/apache2').path, b'.') self.assertEqual(PosixPath(b'/').rel_path_to( b'/var/log/apache2/access.log').path, b'var/log/apache2/access.log') self.assertEqual(PosixPath(b'/tmp/secretdir/').rel_path_to( b'/').path, b'../..') def test_lies_under(self): """ Tests the lies_under method.""" self.assertTrue(PosixPath(b'/tmp') .lies_under(b'/')) self.assertFalse(PosixPath(b'/tmp') .lies_under(b'/var')) self.assertTrue(PosixPath(b'/tmp/some/file/here') .lies_under(b'/tmp/some')) self.assertFalse(PosixPath(b'/tmp/some/file/here') .lies_under(b'/tmp/no')) self.assertFalse(PosixPath(b'/tmp/some/file/here') .lies_under(b'/no/tmp/some')) self.assertFalse(PosixPath(b'/tmp/some/file/here') .lies_under(b'/no/some')) self.assertTrue(PosixPath(b'/tmp/some/file/here') .lies_under(b'/tmp/some/file/here')) self.assertTrue(PosixPath(b'/') .lies_under(b'/')) self.assertTrue(PosixPath(b'') .lies_under(b'')) self.assertTrue(PosixPath(b'test') .lies_under(b'')) self.assertFalse(PosixPath(b'') .lies_under(b'test')) self.assertFalse(PosixPath(b'test') .lies_under(b'/')) def test_comparisons(self): """Tests the comparison operators.""" self.assertTrue(PosixPath(b'/tmp/r\xE9mi') == b'/tmp/r\xE9mi') self.assertTrue(PosixPath(b'/file') != b'/FILE') self.assertFalse(PosixPath(b'file') == PosixPath(b'dir')) self.assertFalse(WindowsPath('some/file') == PosixPath('some/file')) self.assertTrue(PosixPath(b'path/to/file1') < b'path/to/file2') self.assertFalse(b'path/to/file1' >= PosixPath(b'path/to/file2')) if PY3: with self.assertRaises(TypeError): WindowsPath('some/file') < PosixPath('other/file')<|fim▁end|>
class TestWindows(unittest.TestCase): """Tests for WindowsPath. """ def test_construct(self):
<|file_name|>advogato.py<|end_file_name|><|fim▁begin|># Copyright (c) 2001-2004 Twisted Matrix Laboratories.<|fim▁hole|># See LICENSE for details. # ''' Usage: advogato.py <name> <diary entry file> ''' from twisted.web.xmlrpc import Proxy from twisted.internet import reactor from getpass import getpass import sys class AddDiary: def __init__(self, name, password): self.name = name self.password = password self.proxy = Proxy('http://advogato.org/XMLRPC') def __call__(self, filename): self.data = open(filename).read() d = self.proxy.callRemote('authenticate', self.name, self.password) d.addCallbacks(self.login, self.noLogin) def noLogin(self, reason): print "could not login" reactor.stop() def login(self, cookie): d = self.proxy.callRemote('diary.set', cookie, -1, self.data) d.addCallbacks(self.setDiary, self.errorSetDiary) def setDiary(self, response): reactor.stop() def errorSetDiary(self, error): print "could not set diary", error reactor.stop() diary = AddDiary(sys.argv[1], getpass()) diary(sys.argv[2]) reactor.run()<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::net::{TcpStream, TcpListener}; use std::thread; fn handle_client(stream: TcpStream) { println!("Someone connected!"); } fn main() { println!("Starting Iron IRCd");<|fim▁hole|> match stream { Ok(stream) => { thread::spawn(move || { handle_client(stream) }); } Err(e) => { /* Connection Failed */ } } } }<|fim▁end|>
let listener = TcpListener::bind("127.0.0.1:6667").unwrap(); for stream in listener.incoming() {
<|file_name|>c_cm1x.py<|end_file_name|><|fim▁begin|># ---------------------------------------------------------------------------- # Copyright (C) 2013-2014 Huynh Vi Lam <domovilam@gmail.com> # # This file is part of pimucha. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|># ---------------------------------------------------------------------------- import logging,sys logger = logging.getLogger() X10Checkers = { 'RF' : 'x10chk(args)' , 'PL' : 'x10chk(args)' , } X10Encoders = { 'RF' : ('', 'x10rf2hex(args)', '') , 'PL' : ('', 'x10pl2hex(args)', '') , }<|fim▁end|>
# GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>.
<|file_name|>Script.ts<|end_file_name|><|fim▁begin|>import { ILogger, getLogger } from './loggers'; import { CancellationToken, ICancellationToken } from '../util/CancellationToken'; import { Step } from './Step'; import { ModuleLoader } from './ModuleLoader'; import * as _ from 'underscore'; import { Guid } from '../util/Guid'; import { InjectorLookup, Module, ModuleRepository } from './Modules'; import { IScope, Scope } from './Scope'; import validateScriptDefinition from './scriptDefinitionValidator'; import * as helpers from '../util/helpers'; import './modules/assert'; import './modules/async'; import './modules/conditional'; import './modules/http'; import './modules/json'; import './modules/loop'; import './modules/math'; import './modules/misc'; import './modules/stats'; import './modules/timer'; import './modules/wait'; const YAML = require('pumlhorse-yamljs'); class ScriptOptions { logger: ILogger; } export interface IScript { run(context: any, cancellationToken?: ICancellationToken): Promise<any>; addFunction(name: string, func: Function): void; addModule(moduleDescriptor: string | Object): void; id: string; name: string; } export interface IScriptDefinition { name: string; description?: string; modules?: any[]; functions?: Object; expects?: string[]; steps: any[]; cleanup?: any[]; } export class Script implements IScript { id: string; name: string; private internalScript: IScriptInternal; private static readonly DefaultModules = ['assert', 'async', 'conditional', 'json', 'loop', 'math', 'misc', 'timer', 'wait', 'http = http']; public static readonly StandardModules = Script.DefaultModules.concat(['stats']); constructor(private scriptDefinition: IScriptDefinition, private scriptOptions?: ScriptOptions) { validateScriptDefinition(this.scriptDefinition); this.id = new Guid().value;<|fim▁hole|> if (this.scriptOptions == null) { this.scriptOptions = new ScriptOptions(); } if (this.scriptOptions.logger == null) { this.scriptOptions.logger = getLogger(); } this.internalScript = new InternalScript(this.id, this.scriptOptions); } static create(scriptText: string, scriptOptions?: ScriptOptions): Script { const scriptDefinition = YAML.parse(scriptText); return new Script(scriptDefinition, scriptOptions); } async run(context?: any, cancellationToken?: ICancellationToken): Promise<any> { if (cancellationToken == null) cancellationToken = CancellationToken.None; this.evaluateExpectations(context); this.loadModules(); this.loadFunctions(); this.loadCleanupSteps(); const scope = new Scope(this.internalScript, context); try { await this.internalScript.runSteps(this.scriptDefinition.steps, scope, cancellationToken); return scope; } finally { await this.runCleanupTasks(scope, cancellationToken); } } addFunction(name: string, func: Function): void { this.internalScript.functions[name] = func; } addModule(moduleDescriptor: string | Object) { const moduleLocator = ModuleLoader.getModuleLocator(moduleDescriptor); const mod = ModuleRepository.lookup[moduleLocator.name]; if (mod == null) throw new Error(`Module "${moduleLocator.name}" does not exist`); if (moduleLocator.hasNamespace) { helpers.assignObjectByString(this.internalScript.modules, moduleLocator.namespace, mod.getFunctions()); } else { _.extend(this.internalScript.modules, mod.getFunctions()); } _.extend(this.internalScript.injectors, mod.getInjectors()) } private evaluateExpectations(context: any) { if (this.scriptDefinition.expects == null) return; const missingValues = _.difference(this.scriptDefinition.expects.map(m => m.toString()), 
_.keys(context)); if (missingValues.length > 0) { throw new Error(missingValues.length > 1 ? `Expected values "${missingValues.join(', ')}", but they were not passed` : `Expected value "${missingValues[0]}", but it was not passed`) } } private loadModules() { const modules = Script.DefaultModules.concat(this.scriptDefinition.modules == null ? [] : this.scriptDefinition.modules) for (let i = 0; i < modules.length; i++) { this.addModule(modules[i]); } } private loadFunctions() { this.addFunction('debug', (msg) => this.scriptOptions.logger.debug(msg)); this.addFunction('log', (msg) => this.scriptOptions.logger.log(msg)); this.addFunction('warn', (msg) => this.scriptOptions.logger.warn(msg)); this.addFunction('error', (msg) => this.scriptOptions.logger.error(msg)); const functions = this.scriptDefinition.functions; if (functions == null) { return; } for(let name in functions) { this.addFunction(name, this.createFunction(functions[name])); } } private createFunction(val) { if (_.isString(val)) return new Function(val) function construct(args) { function DeclaredFunction(): void { return Function.apply(this, args); } DeclaredFunction.prototype = Function.prototype; return new DeclaredFunction(); } return construct(val) } private loadCleanupSteps() { if (this.scriptDefinition.cleanup == null) { return; } for (let i = 0; i < this.scriptDefinition.cleanup.length; i++) { this.internalScript.cleanup.push(this.scriptDefinition.cleanup[i]); } } private async runCleanupTasks(scope: Scope, cancellationToken: ICancellationToken): Promise<any> { if (this.internalScript.cleanup == null) { return; } for (let i = 0; i < this.internalScript.cleanup.length; i++) { const task = this.internalScript.cleanup[i]; try { await this.internalScript.runSteps([task], scope, cancellationToken); } catch (e) { this.scriptOptions.logger.error(`Error in cleanup task: ${e.message}`); } } } } export interface IScriptInternal { modules: Module[]; functions: {[name: string]: Function}; injectors: InjectorLookup; steps: any[]; cleanup: any[]; emit(eventName: string, eventInfo: any); addCleanupTask(task: any, atEnd?: boolean); getModule(moduleName: string): any; id: string; runSteps(steps: any[], scope: IScope, cancellationToken?: ICancellationToken): Promise<any>; } class InternalScript implements IScriptInternal { id: string; modules: Module[]; injectors: InjectorLookup; functions: {[name: string]: Function}; steps: any[]; cleanup: any[]; private cancellationToken: ICancellationToken; private isEnded: boolean = false; constructor(id: string, private scriptOptions: ScriptOptions) { this.id = id; this.modules = []; this.injectors = { '$scope': (scope: IScope) => scope, '$logger': () => this.scriptOptions.logger }; this.functions = { 'end': () => { this.isEnded = true; } }; this.steps = []; this.cleanup = []; } emit(): void { } addCleanupTask(task: any, atEnd?: boolean): void { if (atEnd) this.cleanup.push(task); else this.cleanup.splice(0, 0, task); } getModule(moduleName: string): any { return this.modules[moduleName]; } async runSteps(steps: any[], scope: IScope, cancellationToken: ICancellationToken): Promise<any> { if (cancellationToken != null) { this.cancellationToken = cancellationToken; } if (steps == null || steps.length == 0) { this.scriptOptions.logger.warn('Script does not contain any steps'); return; } _.extend(scope, this.modules, this.functions); for (let i = 0; i < steps.length; i++) { if (this.cancellationToken.isCancellationRequested || this.isEnded) { return; } await this.runStep(steps[i], scope); } } private 
async runStep(stepDefinition: any, scope: IScope) { if (_.isFunction(stepDefinition)) { // If we programatically added a function as a step, just shortcut and run it return stepDefinition.call(scope); } let step: Step; const lineNumber = stepDefinition.getLineNumber == null ? null : stepDefinition.getLineNumber(); if (_.isString(stepDefinition)) { step = new Step(stepDefinition, null, scope, this.injectors, lineNumber); } else { const functionName = _.keys(stepDefinition)[0]; step = new Step(functionName, stepDefinition[functionName], scope, this.injectors, lineNumber); } await step.run(this.cancellationToken); } }<|fim▁end|>
this.name = scriptDefinition.name;
<|file_name|>test_parse_rank_score.py<|end_file_name|><|fim▁begin|>from scout.parse.variant.rank_score import parse_rank_score from scout.parse.variant.variant import parse_variant def test_parse_rank_score(): ## GIVEN a rank score string on genmod format rank_scores_info = "123:10" variant_score = 10.0 family_id = "123" ## WHEN parsing the rank score parsed_rank_score = parse_rank_score(rank_scores_info, family_id) ## THEN assert that the correct rank score is parsed assert variant_score == parsed_rank_score def test_parse_rank_score_no_score(): ## GIVEN a empty rank score string rank_scores_info = "" family_id = "123" ## WHEN parsing the rank score parsed_rank_score = parse_rank_score(rank_scores_info, family_id)<|fim▁hole|> def test_parse_rank_score_variant(cyvcf2_variant, case_obj, scout_config): ## GIVEN a variant rank_score = 15 case_id = case_obj["_id"] ## WHEN adding a rank score string to the INFO field rank_score_str = f"{case_id}:{rank_score}" cyvcf2_variant.INFO["RankScore"] = rank_score_str ## WHEN parsing the variant var_info = parse_variant(cyvcf2_variant, case_obj) ## THEN assert that the correct score is parsed assert var_info["rank_score"] == rank_score<|fim▁end|>
## THEN assert that None is returned assert parsed_rank_score == None
<|file_name|>single_thread_calculator.rs<|end_file_name|><|fim▁begin|>use crate::{MonteCarloPiCalculator, gen_random}; use std::sync::Arc; pub struct SingleThreadCalculator {} impl SingleThreadCalculator { #[inline] fn gen_randoms_static(n: usize) -> (Vec<f64>, Vec<f64>) { let mut xs = vec![0.0; n]; let mut ys = vec![0.0; n]; for i in 0..n { let mut t = gen_random(i as f64 / n as f64); t = gen_random(t); t = gen_random(t); xs[i] = t; for _ in 0..10 { t = gen_random(t); }<|fim▁hole|> ys[i] = t; } return (xs, ys); } #[inline] #[allow(unused_parens)] fn cal_static(xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 { let mut cnt = 0; for i in 0..n { if (xs[i] * xs[i] + ys[i] * ys[i] < 1.0) { cnt += 1; } } return cnt; } } impl MonteCarloPiCalculator for SingleThreadCalculator { #[inline] fn new(_n: usize) -> SingleThreadCalculator { return SingleThreadCalculator {}; } #[inline] fn gen_randoms(&self, n: usize) -> (Vec<f64>, Vec<f64>) { return SingleThreadCalculator::gen_randoms_static(n); } #[inline] fn cal(&self, xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 { return SingleThreadCalculator::cal_static(xs, ys, n); } }<|fim▁end|>
<|file_name|>cog_selector.py<|end_file_name|><|fim▁begin|># #START_LICENSE########################################################### # # # This file is part of the Environment for Tree Exploration program # (ETE). http://etetoolkit.org # # ETE is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ETE is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public # License for more details. # # You should have received a copy of the GNU General Public License # along with ETE. If not, see <http://www.gnu.org/licenses/>. # # # ABOUT THE ETE PACKAGE # ===================== # # ETE is distributed under the GPL copyleft license (2008-2015). # # If you make use of ETE in published work, please cite: # # Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon. # ETE: a python Environment for Tree Exploration. Jaime BMC # Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24 # # Note that extra references to the specific methods implemented in # the toolkit may be available in the documentation. # # More info at http://etetoolkit.org. Contact: huerta@embl.de # # # #END_LICENSE############################################################# from StringIO import StringIO import cPickle from string import strip from collections import defaultdict import logging import os log = logging.getLogger("main") from ete2.tools.phylobuild_lib.master_task import CogSelectorTask from ete2.tools.phylobuild_lib.errors import DataError, TaskError from ete2.tools.phylobuild_lib.utils import (GLOBALS, print_as_table, generate_node_ids, encode_seqname, md5, pjoin, _min, _max, _mean, _median, _std) from ete2.tools.phylobuild_lib import db __all__ = ["CogSelector"] class CogSelector(CogSelectorTask): def __init__(self, target_sp, out_sp, seqtype, conf, confname): self.missing_factor = float(conf[confname]["_species_missing_factor"]) self.max_missing_factor = float(conf[confname]["_max_species_missing_factor"]) self.cog_hard_limit = int(conf[confname]["_max_cogs"])<|fim▁hole|> CogSelectorTask.__init__(self, node_id, "cog_selector", "MCL-COGs", None, conf[confname]) # taskid does not depend on jobs, so I set it manually self.cladeid = clade_id self.seqtype = seqtype self.targets = target_sp self.outgroups = out_sp self.init() self.size = len(target_sp | out_sp) self.cog_analysis = None self.cogs = None def finish(self): def sort_cogs_by_size(c1, c2): ''' sort cogs by descending size. If two cogs are the same size, sort them keeping first the one with the less represented species. 
Otherwise sort by sequence name sp_seqid.''' r = -1 * cmp(len(c1), len(c2)) if r == 0: # finds the cog including the less represented species c1_repr = _min([sp2cogs[_sp] for _sp, _seq in c1]) c2_repr = _min([sp2cogs[_sp] for _sp, _seq in c2]) r = cmp(c1_repr, c2_repr) if r == 0: return cmp(sorted(c1), sorted(c2)) else: return r else: return r def sort_cogs_by_sp_repr(c1, c2): c1_repr = _min([sp2cogs[_sp] for _sp, _seq in c1]) c2_repr = _min([sp2cogs[_sp] for _sp, _seq in c2]) r = cmp(c1_repr, c2_repr) if r == 0: r = -1 * cmp(len(c1), len(c2)) if r == 0: return cmp(sorted(c1), sorted(c2)) else: return r else: return r all_species = self.targets | self.outgroups # strict threshold #min_species = len(all_species) - int(round(self.missing_factor * len(all_species))) # Relax threshold for cog selection to ensure sames genes are always included min_species = len(all_species) - int(round(self.missing_factor * len(GLOBALS["target_species"]))) min_species = max(min_species, (1-self.max_missing_factor) * len(all_species)) smallest_cog, largest_cog = len(all_species), 0 all_singletons = [] sp2cogs = defaultdict(int) for cognumber, cog in enumerate(open(GLOBALS["cogs_file"])): sp2seqs = defaultdict(list) for sp, seqid in [map(strip, seq.split(GLOBALS["spname_delimiter"], 1)) for seq in cog.split("\t")]: sp2seqs[sp].append(seqid) one2one_cog = set() for sp, seqs in sp2seqs.iteritems(): #if len(seqs) != 1: # print sp, len(seqs) if sp in all_species and len(seqs) == 1: sp2cogs[sp] += 1 one2one_cog.add((sp, seqs[0])) smallest_cog = min(smallest_cog, len(one2one_cog)) largest_cog = max(largest_cog, len(one2one_cog)) all_singletons.append(one2one_cog) #if len(one2one_cog) >= min_species: # valid_cogs.append(one2one_cog) cognumber += 1 # sets the ammount of cogs in file for sp, ncogs in sorted(sp2cogs.items(), key=lambda x: x[1], reverse=True): log.log(28, "% 20s found in single copy in % 6d (%0.1f%%) COGs " %(sp, ncogs, 100 * ncogs/float(cognumber))) valid_cogs = sorted([sing for sing in all_singletons if len(sing) >= min_species], sort_cogs_by_size) log.log(28, "Largest cog size: %s. Smallest cog size: %s" %( largest_cog, smallest_cog)) self.cog_analysis = "" # save original cog names hitting the hard limit if len(valid_cogs) > self.cog_hard_limit: log.warning("Applying hard limit number of COGs: %d out of %d available" %(self.cog_hard_limit, len(valid_cogs))) self.raw_cogs = valid_cogs[:self.cog_hard_limit] self.cogs = [] # Translate sequence names into the internal DB names sp_repr = defaultdict(int) sizes = [] for co in self.raw_cogs: sizes.append(len(co)) for sp, seq in co: sp_repr[sp] += 1 co_names = ["%s%s%s" %(sp, GLOBALS["spname_delimiter"], seq) for sp, seq in co] encoded_names = db.translate_names(co_names) if len(encoded_names) != len(co): print set(co) - set(encoded_names.keys()) raise DataError("Some sequence ids could not be translated") self.cogs.append(encoded_names.values()) # ERROR! COGs selected are not the prioritary cogs sorted out before!!! # Sort Cogs according to the md5 hash of its content. 
Random # sorting but kept among runs #map(lambda x: x.sort(), self.cogs) #self.cogs.sort(lambda x,y: cmp(md5(','.join(x)), md5(','.join(y)))) log.log(28, "Analysis of current COG selection:") for sp, ncogs in sorted(sp_repr.items(), key=lambda x:x[1], reverse=True): log.log(28, " % 30s species present in % 6d COGs (%0.1f%%)" %(sp, ncogs, 100 * ncogs/float(len(self.cogs)))) log.log(28, " %d COGs selected with at least %d species out of %d" %(len(self.cogs), min_species, len(all_species))) log.log(28, " Average COG size %0.1f/%0.1f +- %0.1f" %(_mean(sizes), _median(sizes), _std(sizes))) # Some consistency checks missing_sp = (all_species) - set(sp_repr.keys()) if missing_sp: log.error("%d missing species or not present in single-copy in any cog:\n%s" %\ (len(missing_sp), '\n'.join(missing_sp))) open('etebuild.valid_species_names.tmp', 'w').write('\n'.join(sp_repr.keys()) +'\n') log.error("All %d valid species have been dumped into etebuild.valid_species_names.tmp." " You can use --spfile to restrict the analysis to those species." %len(sp_repr)) raise TaskError('missing or not single-copy species under current cog selection') CogSelectorTask.store_data(self, self.cogs, self.cog_analysis) if __name__ == "__main__": ## TEST CODE import argparse parser = argparse.ArgumentParser() # Input data related flags parser.add_argument("--cogs_file", dest="cogs_file", required=True, help="Cogs file") parser.add_argument("--spname_delimiter", dest="spname_delimiter", type=str, default = "_", help="species name delimiter character") parser.add_argument("--target_sp", dest="target_sp", type=str, nargs="+", help="target species sperated by") parser.add_argument("-m", dest="missing_factor", type=float, required=True, help="missing factor for cog selection") parser.add_argument("--max_missing", dest="max_missing_factor", type=float, default = 0.3, help="max missing factor for cog selection") parser.add_argument("--total_species", dest="total_species", type=int, required=True, help="total number of species in the analysis") args = parser.parse_args() GLOBALS["cogs_file"] = args.cogs_file GLOBALS["spname_delimiter"] = args.spname_delimiter target_sp = args.target_sp logging.basicConfig(level=logging.DEBUG) log = logging GLOBALS["target_species"] = [1] * args.total_species conf = { "user": {"_species_missing_factor": args.missing_factor, "_max_species_missing_factor": args.max_missing_factor, "_max_cogs": 10000 }} CogSelectorTask.store_data=lambda a,b,c: True C = CogSelector(set(target_sp), set(), "aa", conf, "user") db.translate_names = lambda x: dict([(n,n) for n in x]) C.finish()<|fim▁end|>
node_id, clade_id = generate_node_ids(target_sp, out_sp) # Initialize task
<|file_name|>MultiSurfaceType.java<|end_file_name|><|fim▁begin|>/* * Copyright 2013 Gunnar Kappei. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.opengis.gml; /**<|fim▁hole|> * An XML MultiSurfaceType(@http://www.opengis.net/gml). * * This is a complex type. */ public interface MultiSurfaceType extends net.opengis.gml.AbstractGeometricAggregateType { public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType) org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(MultiSurfaceType.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s6E28D279B6C224D74769DB8B98AF1665").resolveHandle("multisurfacetypeb44dtype"); /** * Gets a List of "surfaceMember" elements */ java.util.List<net.opengis.gml.SurfacePropertyType> getSurfaceMemberList(); /** * Gets array of all "surfaceMember" elements * @deprecated */ @Deprecated net.opengis.gml.SurfacePropertyType[] getSurfaceMemberArray(); /** * Gets ith "surfaceMember" element */ net.opengis.gml.SurfacePropertyType getSurfaceMemberArray(int i); /** * Returns number of "surfaceMember" element */ int sizeOfSurfaceMemberArray(); /** * Sets array of all "surfaceMember" element */ void setSurfaceMemberArray(net.opengis.gml.SurfacePropertyType[] surfaceMemberArray); /** * Sets ith "surfaceMember" element */ void setSurfaceMemberArray(int i, net.opengis.gml.SurfacePropertyType surfaceMember); /** * Inserts and returns a new empty value (as xml) as the ith "surfaceMember" element */ net.opengis.gml.SurfacePropertyType insertNewSurfaceMember(int i); /** * Appends and returns a new empty value (as xml) as the last "surfaceMember" element */ net.opengis.gml.SurfacePropertyType addNewSurfaceMember(); /** * Removes the ith "surfaceMember" element */ void removeSurfaceMember(int i); /** * Gets the "surfaceMembers" element */ net.opengis.gml.SurfaceArrayPropertyType getSurfaceMembers(); /** * True if has "surfaceMembers" element */ boolean isSetSurfaceMembers(); /** * Sets the "surfaceMembers" element */ void setSurfaceMembers(net.opengis.gml.SurfaceArrayPropertyType surfaceMembers); /** * Appends and returns a new empty "surfaceMembers" element */ net.opengis.gml.SurfaceArrayPropertyType addNewSurfaceMembers(); /** * Unsets the "surfaceMembers" element */ void unsetSurfaceMembers(); /** * A factory class with static methods for creating instances * of this type. 
*/ public static final class Factory { public static net.opengis.gml.MultiSurfaceType newInstance() { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); } public static net.opengis.gml.MultiSurfaceType newInstance(org.apache.xmlbeans.XmlOptions options) { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); } /** @param xmlAsString the string value to parse */ public static net.opengis.gml.MultiSurfaceType parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null ); } public static net.opengis.gml.MultiSurfaceType parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options ); } /** @param file the file from which to load an xml document */ public static net.opengis.gml.MultiSurfaceType parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null ); } public static net.opengis.gml.MultiSurfaceType parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options ); } public static net.opengis.gml.MultiSurfaceType parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null ); } public static net.opengis.gml.MultiSurfaceType parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options ); } public static net.opengis.gml.MultiSurfaceType parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null ); } public static net.opengis.gml.MultiSurfaceType parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options ); } public static net.opengis.gml.MultiSurfaceType parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null ); } public static net.opengis.gml.MultiSurfaceType parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options ); } public static net.opengis.gml.MultiSurfaceType parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException { return (net.opengis.gml.MultiSurfaceType) 
org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null ); } public static net.opengis.gml.MultiSurfaceType parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options ); } public static net.opengis.gml.MultiSurfaceType parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null ); } public static net.opengis.gml.MultiSurfaceType parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options ); } /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */ @Deprecated public static net.opengis.gml.MultiSurfaceType parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null ); } /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */ @Deprecated public static net.opengis.gml.MultiSurfaceType parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException { return (net.opengis.gml.MultiSurfaceType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options ); } /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */ @Deprecated public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException { return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null ); } /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */ @Deprecated public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException { return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options ); } private Factory() { } // No instance of this class allowed } }<|fim▁end|>
<|file_name|>from_bits.rs<|end_file_name|><|fim▁begin|>use itertools::Itertools; use malachite_base::num::basic::traits::Zero; use malachite_base::num::conversion::traits::ExactFrom; use malachite_base::num::logic::traits::BitAccess; use malachite_nz::natural::Natural; pub fn from_bits_asc_naive<I: Iterator<Item = bool>>(bits: I) -> Natural { let mut n = Natural::ZERO; for i in bits.enumerate().filter_map(|(index, bit)| { if bit { Some(u64::exact_from(index)) } else { None } }) { n.set_bit(i); } n } pub fn from_bits_desc_naive<I: Iterator<Item = bool>>(bits: I) -> Natural { let bits = bits.collect_vec(); let mut n = Natural::ZERO; for i in bits.iter().rev().enumerate().filter_map(|(index, &bit)| { if bit { Some(u64::exact_from(index)) } else { None }<|fim▁hole|> n.set_bit(i); } n }<|fim▁end|>
}) {
<|file_name|>app.js<|end_file_name|><|fim▁begin|>(function() { function config($stateProvider, $locationProvider) { $locationProvider .html5Mode({ enabled: true, requireBase: false }); $stateProvider .state('landing', { url: '/',<|fim▁hole|> url: '/album', controller: 'AlbumCtrl as album', templateUrl: '/templates/album.html' }) .state('collection', { url: '/collection', controller: 'CollectionCtrl as collection', templateUrl: '/templates/collection.html' }); } angular .module('blocJams', ['ui.router']) .config(config); })();<|fim▁end|>
controller: 'LandingCtrl as landing', templateUrl: '/templates/landing.html' }) .state('album', {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod clauses; pub mod elements; pub mod expressions; pub mod func; pub mod keywords; pub mod math; pub mod select_statement; pub mod values; use nom::branch::alt; use nom::combinator::map; use nom::number::complete::double; use nom::IResult; use ordered_float::OrderedFloat; use crate::parsers::value::PqlValue; pub use elements::{float_number, string_allowed_in_field, whitespace};<|fim▁hole|>pub use expressions::parse_field; pub use expressions::parse_path_as_expr; pub fn parse_value(input: &str) -> IResult<&str, PqlValue> { alt(( map(elements::string, |s| PqlValue::Str(s.to_string())), map(double, |f| PqlValue::Float(OrderedFloat(f as f64))), ))(input) }<|fim▁end|>
pub use expressions::parse_expr;
<|file_name|>get_outgoing.rs<|end_file_name|><|fim▁begin|>use chrono::{UTC, Date, Datelike}; use std::str::FromStr; // Use of #from_str. use api::client::{TellerClient, ApiServiceResult, Transaction}; use api::client::parse_utc_date_from_transaction; use api::inform::Money; pub trait GetOutgoing { fn get_outgoing(&self, account_id: &str, for_month: &Date<UTC>) -> ApiServiceResult<Money>; } impl<'a> GetOutgoing for TellerClient<'a> { fn get_outgoing(&self, account_id: &str, for_month: &Date<UTC>) -> ApiServiceResult<Money> { let account = try!(self.get_account(&account_id)); let currency = account.currency; let from = for_month.with_day(1).unwrap(); let to = if from.month() < 12 { from.with_month(from.month() + 1).unwrap() } else { from.with_year(from.year() + 1).unwrap().with_month(1).unwrap() }; let transactions: Vec<Transaction> = self.raw_transactions(&account_id, 250, 1) .unwrap_or(vec![]) .into_iter() .filter(|t| { let transaction_date = parse_utc_date_from_transaction(&t); from <= transaction_date && transaction_date <= to }) .collect(); let from_float_string_to_cent_integer = |t: &Transaction| { (f64::from_str(&t.amount).unwrap() * 100f64).round() as i64 }; let from_cent_integer_to_float_string = |amount: i64| format!("{:.2}", amount as f64 / 100f64); let outgoing = transactions.iter() .map(from_float_string_to_cent_integer) .filter(|ci| *ci < 0)<|fim▁hole|> Ok(Money::new(from_cent_integer_to_float_string(outgoing.abs()), currency)) } } #[cfg(test)] mod tests { use api::client::{TellerClient, generate_utc_date_from_date_str}; use super::GetOutgoing; use hyper; mock_connector_in_order!(GetAccountFollowedByGetTransactions { include_str!("../mocks/get-account.http") include_str!("../mocks/get-transactions.http") }); #[test] fn can_get_outgoing() { let c = hyper::client::Client::with_connector(GetAccountFollowedByGetTransactions::default()); let teller = TellerClient::new_with_hyper_client("fake-auth-token", c); let current_month = generate_utc_date_from_date_str("2016-01-01"); let money = teller.get_outgoing("123", &current_month).unwrap(); assert_eq!("55.00 GBP", money.get_balance_for_display(&false)); } }<|fim▁end|>
.fold(0i64, |sum, v| sum + v);
<|file_name|>current.py<|end_file_name|><|fim▁begin|>"""This *was* the parser for the current HTML format on parl.gc.ca. But now we have XML. See parl_document.py. This module is organized like so: __init__.py - utility functions, simple parse interface common.py - infrastructure used in the parsers, i.e. regexes<|fim▁hole|> """ from parliament.imports.hans_old.common import * import logging logger = logging.getLogger(__name__) class HansardParser2009(HansardParser): def __init__(self, hansard, html): for regex in STARTUP_RE_2009: html = re.sub(regex[0], regex[1], html) super(HansardParser2009, self).__init__(hansard, html) for x in self.soup.findAll('a', 'deleteMe'): x.findParent('div').extract() def process_related_link(self, tag, string, current_politician=None): #print "PROCESSING RELATED for %s" % string resid = re.search(r'ResourceID=(\d+)', tag['href']) restype = re.search(r'ResourceType=(Document|Affiliation)', tag['href']) if not resid and restype: return string resid, restype = int(resid.group(1)), restype.group(1) if restype == 'Document': try: bill = Bill.objects.get_by_legisinfo_id(resid) except Bill.DoesNotExist: match = re.search(r'\b[CS]\-\d+[A-E]?\b', string) if not match: logger.error("Invalid bill link %s" % string) return string bill = Bill.objects.create_temporary_bill(legisinfo_id=resid, number=match.group(0), session=self.hansard.session) except Exception, e: print "Related bill search failed for callback %s" % resid print repr(e) return string return u'<bill id="%d" name="%s">%s</bill>' % (bill.id, escape(bill.name), string) elif restype == 'Affiliation': try: pol = Politician.objects.getByParlID(resid) except Politician.DoesNotExist: print "Related politician search failed for callback %s" % resid if getattr(settings, 'PARLIAMENT_LABEL_FAILED_CALLBACK', False): # FIXME migrate away from internalxref? 
InternalXref.objects.get_or_create(schema='pol_parlid', int_value=resid, target_id=-1) return string if pol == current_politician: return string # When someone mentions her riding, don't link back to her return u'<pol id="%d" name="%s">%s</pol>' % (pol.id, escape(pol.name), string) def get_text(self, cursor): text = u'' for string in cursor.findAll(text=parsetools.r_hasText): if string.parent.name == 'a' and string.parent['class'] == 'WebOption': text += self.process_related_link(string.parent, string, self.t['politician']) else: text += unicode(string) return text def parse(self): super(HansardParser2009, self).parse() # Initialize variables t = ParseTracker() self.t = t member_refs = {} # Get the date c = self.soup.find(text='OFFICIAL REPORT (HANSARD)').findNext('h2') self.date = datetime.datetime.strptime(c.string.strip(), "%A, %B %d, %Y").date() self.hansard.date = self.date self.hansard.save() c = c.findNext(text=r_housemet) match = re.search(r_housemet, c.string) t['timestamp'] = self.houseTime(match.group(1), match.group(2)) t.setNext('timestamp', t['timestamp']) # Move the pointer to the start c = c.next # And start the big loop while c is not None: # It's a string if not hasattr(c, 'name'): pass # Heading elif c.name == 'h2': c = c.next if not parsetools.isString(c): raise ParseException("Expecting string right after h2") t.setNext('heading', parsetools.titleIfNecessary(parsetools.tameWhitespace(c.string.strip()))) # Topic elif c.name == 'h3': top = c.find(text=r_letter) #if not parsetools.isString(c): # check if it's an empty header # if c.parent.find(text=r_letter): # raise ParseException("Expecting string right after h3") if top is not None: c = top t['topic_set'] = True t.setNext('topic', parsetools.titleIfNecessary(parsetools.tameWhitespace(c.string.strip()))) elif c.name == 'h4': if c.string == 'APPENDIX': self.saveStatement(t) print "Appendix reached -- we're done!" break # Timestamp elif c.name == 'a' and c.has_key('name') and c['name'].startswith('T'): match = re.search(r'^T(\d\d)(\d\d)$', c['name']) if match: t.setNext('timestamp', parsetools.time_to_datetime( hour=int(match.group(1)), minute=int(match.group(2)), date=self.date)) else: raise ParseException("Couldn't match time %s" % c.attrs['name']) elif c.name == 'b' and c.string: # Something to do with written answers match = r_honorific.search(c.string) if match: # It's a politician asking or answering a question # We don't get a proper link here, so this has to be a name match polname = re.sub(r'\(.+\)', '', match.group(2)).strip() self.saveStatement(t) t['member_title'] = c.string.strip() t['written_question'] = True try: pol = Politician.objects.get_by_name(polname, session=self.hansard.session) t['politician'] = pol t['member'] = ElectedMember.objects.get_by_pol(politician=pol, date=self.date) except Politician.DoesNotExist: print "WARNING: No name match for %s" % polname except Politician.MultipleObjectsReturned: print "WARNING: Multiple pols for %s" % polname else: if not c.string.startswith('Question'): print "WARNING: Unexplained boldness: %s" % c.string # div -- the biggie elif c.name == 'div': origdiv = c if c.find('b'): # We think it's a new speaker # Save the current buffer self.saveStatement(t) c = c.find('b') if c.find('a'): # There's a link... c = c.find('a') match = re.search(r'ResourceType=Affiliation&ResourceID=(\d+)', c['href']) if match and c.find(text=r_letter): parlwebid = int(match.group(1)) # We have the parl ID. First, see if we already know this ID. 
pol = Politician.objects.getByParlID(parlwebid, lookOnline=False) if pol is None: # We don't. Try to do a quick name match first (if flags say so) if not GET_PARLID_ONLINE: who = c.next.string match = re.search(r_honorific, who) if match: polname = re.sub(r'\(.+\)', '', match.group(2)).strip() try: #print "Looking for %s..." % polname, pol = Politician.objects.get_by_name(polname, session=self.hansard.session) #print "found." except Politician.DoesNotExist: pass except Politician.MultipleObjectsReturned: pass if pol is None: # Still no match. Go online... try: pol = Politician.objects.getByParlID(parlwebid, session=self.hansard.session) except Politician.DoesNotExist: print "WARNING: Couldn't find politician for ID %d" % parlwebid if pol is not None: t['member'] = ElectedMember.objects.get_by_pol(politician=pol, date=self.date) t['politician'] = pol c = c.next if not parsetools.isString(c): raise Exception("Expecting string in b for member name") t['member_title'] = c.strip() #print c if t['member_title'].endswith(':'): # Remove colon in e.g. Some hon. members: t['member_title'] = t['member_title'][:-1] # Sometimes we don't get a link for short statements -- see if we can identify by backreference if t['member']: member_refs[t['member_title']] = t['member'] # Also save a backref w/o position/riding member_refs[re.sub(r'\s*\(.+\)\s*', '', t['member_title'])] = t['member'] elif t['member_title'] in member_refs: t['member'] = member_refs[t['member_title']] t['politician'] = t['member'].politician c.findParent('b').extract() # We've got the title, now get the rest of the paragraph c = origdiv t.addText(self.get_text(c)) else: # There should be text in here if c.find('div'): if c.find('div', 'Footer'): # We're done! self.saveStatement(t) print "Footer div reached -- done!" break raise Exception("I wasn't expecting another div in here") txt = self.get_text(c).strip() if r_proceedings.search(txt): self.saveStatement(t) self.saveProceedingsStatement(txt, t) else: t.addText(txt, blockquote=bool(c.find('small'))) else: #print c.name if c.name == 'b': print "B: ", print c #if c.name == 'p': # print "P: ", # print c c = c.next return self.statements<|fim▁end|>
current.py - parser for the Hansard format used from 2006 to the present
old.py - (fairly crufty) parser for the format used from 1994 to 2006
<|file_name|>problem-0013.rs<|end_file_name|><|fim▁begin|>/// Problem 13 /// Work out the first ten digits of the sum of the following one-hundred 50-digit numbers. /// /// 37107287533902102798797998220837590246510135740250 /// ... fn main() { let raw: String = "\ 37107287533902102798797998220837590246510135740250\n\ 46376937677490009712648124896970078050417018260538\n\ 74324986199524741059474233309513058123726617309629\n\ 91942213363574161572522430563301811072406154908250\n\ 23067588207539346171171980310421047513778063246676\n\ 89261670696623633820136378418383684178734361726757\n\ 28112879812849979408065481931592621691275889832738\n\ 44274228917432520321923589422876796487670272189318\n\ 47451445736001306439091167216856844588711603153276\n\ 70386486105843025439939619828917593665686757934951\n\ 62176457141856560629502157223196586755079324193331\n\ 64906352462741904929101432445813822663347944758178\n\ 92575867718337217661963751590579239728245598838407\n\ 58203565325359399008402633568948830189458628227828\n\ 80181199384826282014278194139940567587151170094390\n\ 35398664372827112653829987240784473053190104293586\n\ 86515506006295864861532075273371959191420517255829\n\ 71693888707715466499115593487603532921714970056938\n\ 54370070576826684624621495650076471787294438377604\n\ 53282654108756828443191190634694037855217779295145\n\ 36123272525000296071075082563815656710885258350721\n\ 45876576172410976447339110607218265236877223636045\n\ 17423706905851860660448207621209813287860733969412\n\ 81142660418086830619328460811191061556940512689692\n\ 51934325451728388641918047049293215058642563049483\n\ 62467221648435076201727918039944693004732956340691\n\ 15732444386908125794514089057706229429197107928209\n\ 55037687525678773091862540744969844508330393682126\n\ 18336384825330154686196124348767681297534375946515\n\ 80386287592878490201521685554828717201219257766954\n\ 78182833757993103614740356856449095527097864797581\n\ 16726320100436897842553539920931837441497806860984\n\ 48403098129077791799088218795327364475675590848030\n\ 87086987551392711854517078544161852424320693150332\n\ 59959406895756536782107074926966537676326235447210\n\ 69793950679652694742597709739166693763042633987085\n\ 41052684708299085211399427365734116182760315001271\n\ 65378607361501080857009149939512557028198746004375\n\ 35829035317434717326932123578154982629742552737307\n\ 94953759765105305946966067683156574377167401875275\n\ 88902802571733229619176668713819931811048770190271\n\ 25267680276078003013678680992525463401061632866526\n\ 36270218540497705585629946580636237993140746255962\n\ 24074486908231174977792365466257246923322810917141\n\ 91430288197103288597806669760892938638285025333403\n\ 34413065578016127815921815005561868836468420090470\n\ 23053081172816430487623791969842487255036638784583\n\ 11487696932154902810424020138335124462181441773470\n\ 63783299490636259666498587618221225225512486764533\n\ 67720186971698544312419572409913959008952310058822\n\ 95548255300263520781532296796249481641953868218774\n\ 76085327132285723110424803456124867697064507995236\n\ 37774242535411291684276865538926205024910326572967\n\ 23701913275725675285653248258265463092207058596522\n\ 29798860272258331913126375147341994889534765745501\n\ 18495701454879288984856827726077713721403798879715\n\ 38298203783031473527721580348144513491373226651381\n\ 34829543829199918180278916522431027392251122869539\n\ 40957953066405232632538044100059654939159879593635\n\ 29746152185502371307642255121183693803580388584903\n\ 
41698116222072977186158236678424689157993532961922\n\ 62467957194401269043877107275048102390895523597457\n\ 23189706772547915061505504953922979530901129967519\n\ 86188088225875314529584099251203829009407770775672\n\ 11306739708304724483816533873502340845647058077308\n\ 82959174767140363198008187129011875491310547126581\n\ 97623331044818386269515456334926366572897563400500\n\ 42846280183517070527831839425882145521227251250327\n\ 55121603546981200581762165212827652751691296897789\n\ 32238195734329339946437501907836945765883352399886\n\ 75506164965184775180738168837861091527357929701337\n\ 62177842752192623401942399639168044983993173312731\n\ 32924185707147349566916674687634660915035914677504\n\ 99518671430235219628894890102423325116913619626622\n\ 73267460800591547471830798392868535206946944540724\n\ 76841822524674417161514036427982273348055556214818\n\ 97142617910342598647204516893989422179826088076852\n\ 87783646182799346313767754307809363333018982642090\n\ 10848802521674670883215120185883543223812876952786\n\ 71329612474782464538636993009049310363619763878039\n\ 62184073572399794223406235393808339651327408011116\n\ 66627891981488087797941876876144230030984490851411\n\ 60661826293682836764744779239180335110989069790714\n\ 85786944089552990653640447425576083659976645795096\n\ 66024396409905389607120198219976047599490197230297\n\ 64913982680032973156037120041377903785566085089252\n\ 16730939319872750275468906903707539413042652315011\n\<|fim▁hole|> 94809377245048795150954100921645863754710598436791\n\ 78639167021187492431995700641917969777599028300699\n\ 15368713711936614952811305876380278410754449733078\n\ 40789923115535562561142322423255033685442488917353\n\ 44889911501440648020369068063960672322193204149535\n\ 41503128880339536053299340368006977710650566631954\n\ 81234880673210146739058568557934581403627822703280\n\ 82616570773948327592232845941706525094512325230608\n\ 22918802058777319719839450180888072429661980811197\n\ 77158542502016545090413245809786882778948721859617\n\ 72107838435069186155435662884062257473692284509516\n\ 20849603980134001723930671666823555245252804609722\n\ 53503534226472524250874054075591789781264330331690".to_string(); let first_digits: usize = 10; let mut sum: Vec<u8> = Vec::new(); let mut nums: Vec<Vec<u8>> = Vec::new(); for line in raw.split('\n') { // Push reversed vector of u8 digits nums.push(line.chars().map(|x| x.to_digit(10).unwrap() as u8).rev().collect()) } let mut rem:u16 = 0; for i in 0..nums[0].len() { //first_digits { let mut col_sum:u16 = rem; for j in 0..nums.len() { col_sum += nums[j][i] as u16; } sum.push((col_sum % 10u16) as u8); // Do remainder and carry with integer division rem = col_sum / 10; } // Drain remainder digits while rem > 0 { sum.push((rem % 10) as u8); rem /= 10; } let mut i: usize = 0; print!("Answer: "); for s in sum.iter().rev() { i += 1; print!("{}", s); if i >= first_digits { break } } println!(""); }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """GRR restful API rendering plugins.""" # pylint: disable=unused-import<|fim▁hole|>from grr.gui.api_plugins import docs from grr.gui.api_plugins import hunt from grr.gui.api_plugins import reflection from grr.gui.api_plugins import stats<|fim▁end|>
from grr.gui.api_plugins import aff4 from grr.gui.api_plugins import artifact from grr.gui.api_plugins import config
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Collision detection algorithms<|fim▁hole|><|fim▁end|>
pub mod minkowski; pub mod broad_phase;
<|file_name|>moment.ts<|end_file_name|><|fim▁begin|>import 'moment';<|fim▁hole|> if (!date) return undefined; return moment(date).format(format); } }<|fim▁end|>
export class MomentValueConverter { toView(date: Date, format: string) { if (!format) format = 'LLL';
<|file_name|>twitter.py<|end_file_name|><|fim▁begin|>import urllib from askbot.deps.django_authopenid.util import OAuthConnection class Twitter(OAuthConnection):<|fim▁hole|> super(Twitter, self).__init__('twitter') self.tweet_url = 'https://api.twitter.com/1.1/statuses/update.json' def tweet(self, text, access_token=None): client = self.get_client(access_token) body = urllib.urlencode({'status': text}) return self.send_request(client, self.tweet_url, 'POST', body=body)<|fim▁end|>
def __init__(self):
<|file_name|>sigecoin_vi.ts<|end_file_name|><|fim▁begin|><TS language="vi" version="2.1"> <context> <name>AddressBookPage</name> <message> <source>Create a new address</source> <translation>Tạo một địa chỉ mới</translation> </message> <message> <source>&amp;New</source> <translation>Tạo mới</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Sao chép các địa chỉ đã được chọn vào bộ nhớ tạm thời của hệ thống</translation> </message> <message> <source>&amp;Copy</source> <translation>Sao chép</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Xóa</translation> </message> </context> <context> <name>AddressTableModel</name> </context> <context> <name>AskPassphraseDialog</name> </context> <context> <name>BanTableModel</name> </context> <context> <name>SigecoinGUI</name> </context> <context> <name>CoinControlDialog</name> <message> <source>Amount:</source> <translation>Số lượng:</translation> </message> <message> <source>Amount</source> <translation>Số lượng</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>&amp;Label</source> <translation>Nhãn dữ liệu</translation> </message> <message> <source>&amp;Address</source> <translation>Địa chỉ</translation> </message> </context> <context> <name>FreespaceChecker</name> </context> <context> <name>HelpMessageDialog</name> </context> <context> <name>Intro</name> </context> <context> <name>ModalOverlay</name> </context> <context> <name>OpenURIDialog</name> </context> <context> <name>OptionsDialog</name> </context> <context> <name>OverviewPage</name> </context> <context> <name>PaymentServer</name> </context> <context> <name>PeerTableModel</name> </context> <context> <name>QObject</name> <message> <source>Amount</source> <translation>Số lượng</translation> </message> </context> <context> <name>QObject::QObject</name> </context> <context> <name>QRImageWidget</name> </context> <context> <name>RPCConsole</name> </context><|fim▁hole|><context> <name>ReceiveRequestDialog</name> <message> <source>Copy &amp;Address</source> <translation>Sao chép địa chỉ</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> </context> <context> <name>SendCoinsDialog</name> <message> <source>Amount:</source> <translation>Số lượng:</translation> </message> </context> <context> <name>SendCoinsEntry</name> </context> <context> <name>SendConfirmationDialog</name> </context> <context> <name>ShutdownWindow</name> </context> <context> <name>SignVerifyMessageDialog</name> </context> <context> <name>SplashScreen</name> </context> <context> <name>TrafficGraphWidget</name> </context> <context> <name>TransactionDesc</name> </context> <context> <name>TransactionDescDialog</name> </context> <context> <name>TransactionTableModel</name> </context> <context> <name>TransactionView</name> </context> <context> <name>UnitDisplayStatusBarControl</name> </context> <context> <name>WalletFrame</name> </context> <context> <name>WalletModel</name> </context> <context> <name>WalletView</name> </context> <context> <name>sigecoin-engine</name> </context> </TS><|fim▁end|>
<context> <name>ReceiveCoinsDialog</name> </context>
<|file_name|>testmarkint.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2019 Ondrej Starek * * This file is part of OTest2. * * OTest2 is free software: you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OTest2 is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with OTest2. If not, see <http://www.gnu.org/licenses/>. */ #include <testmarkint.h> #include <assert.h> #include <iostream> #include <testmarkhash.h> #include <testmarkin.h> #include <testmarkout.h> namespace OTest2 { namespace { const char SERIALIZE_TYPE_MARK[] = "ot2:int"; } /* -- namespace */ TestMarkInt::TestMarkInt( int64_t value_) : value(value_) {<|fim▁hole|>} TestMarkInt::TestMarkInt( const CtorMark*) : value(0) { } TestMarkInt::~TestMarkInt() { } const char* TestMarkInt::typeMark() { return SERIALIZE_TYPE_MARK; } TestMarkHashCode TestMarkInt::doGetHashCode() const noexcept { return TestMarkHash::hashBasicType(SERIALIZE_TYPE_MARK, value); } bool TestMarkInt::doIsEqual( const TestMark& other_, long double precision_) const { return value == static_cast<const TestMarkInt*>(&other_)->value; } bool TestMarkInt::doIsEqualValue( const TestMark& other_, long double precision_) const { return doIsEqual(other_, precision_); } void TestMarkInt::doDiffArray( int level_, std::vector<LinearizedRecord>& array_) const { /* -- there are no children */ } void TestMarkInt::doLinearizedMark( int level_, const std::string& label_, std::vector<LinearizedRecord>& array_) const { array_.push_back({level_, this, label_}); } void TestMarkInt::doPrintOpen( std::ostream& os_, const std::string& prefix_) const { os_ << prefix_ << value << '\n'; } void TestMarkInt::doPrintClose( std::ostream& os_, const std::string& prefix_) const { /* -- nothing to do */ } void TestMarkInt::doSerializeMark( TestMarkOut& serializer_) const { serializer_.writeTypeMark(SERIALIZE_TYPE_MARK); serializer_.writeInt(value); } void TestMarkInt::doDeserializeMark( TestMarkFactory& factory_, TestMarkIn& deserializer_) { value = deserializer_.readInt(); } } /* namespace OTest2 */<|fim▁end|>
<|file_name|>SendMessageUtil.java<|end_file_name|><|fim▁begin|>package com.wjyup.coolq.util; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject;<|fim▁hole|>import com.google.common.hash.HashCode; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import com.google.gson.JsonObject; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.springframework.util.DigestUtils; import java.nio.charset.StandardCharsets; /** * 发送消息工具类 * @author WJY */ public class SendMessageUtil { private static Logger log = LogManager.getLogger(SendMessageUtil.class); /** * 发送json数据并获取返回值 * @param message 消息 * @return 发送消息的结果 */ public static String sendSocketData(String message){ try { ConfigCache configCache = SpringContext.getConfigCache(); //判断发送消息方式 if(StaticConf.MSG_SEND_TYPE_HTTP.equalsIgnoreCase(configCache.getMSG_SEND_TYPE())){// http String url = String.format("http://%s:%s", configCache.getHTTP_HOST(), configCache.getHTTP_PORT()); if(configCache.isUSE_TOKEN()){// 使用token long authTime = System.currentTimeMillis() / 1000; String key = configCache.getKEY()+":"+authTime; String authToken = DigestUtils.md5DigestAsHex(key.getBytes(StandardCharsets.UTF_8)); JSONObject jsonObject = JSON.parseObject(message); jsonObject.put("authTime", authTime); jsonObject.put("authToken", authToken); message = jsonObject.toJSONString(); } log.debug("发送的json文本:"+message); try{ String result = WebUtil.post(url, message); log.debug("返回结果:" + result); return result; }catch (Exception e){ log.error(e.getMessage(),e); } } } catch (Exception e) { log.error(e.getMessage(), e); } return null; } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .polarpoint import PolarPoint __all__ = [ 'PolarPoint',<|fim▁hole|>]<|fim▁end|>
<|file_name|>testcase_lmd_loader.js<|end_file_name|><|fim▁begin|>(function (require) { var test = require('test'), asyncTest = require('asyncTest'), start = require('start'), module = require('module'), ok = require('ok'), expect = require('expect'), $ = require('$'), document = require('document'), raises = require('raises'), rnd = '?' + Math.random(), ENV_NAME = require('worker_some_global_var') ? 'Worker' : require('node_some_global_var') ? 'Node' : 'DOM'; function getComputedStyle(element, rule) { if(document.defaultView && document.defaultView.getComputedStyle){ return document.defaultView.getComputedStyle(element, "").getPropertyValue(rule); } rule = rule.replace(/\-(\w)/g, function (strMatch, p1){ return p1.toUpperCase(); }); return element.currentStyle[rule]; } module('LMD loader @ ' + ENV_NAME); asyncTest("require.js()", function () { expect(6); require.js('./modules/loader/non_lmd_module.js' + rnd, function (script_tag) { ok(typeof script_tag === "object" && script_tag.nodeName.toUpperCase() === "SCRIPT", "should return script tag on success"); ok(require('some_function')() === true, "we can grab content of the loaded script"); ok(require('./modules/loader/non_lmd_module.js' + rnd) === script_tag, "should cache script tag on success"); // some external require.js('http://yandex.ru/jquery.js' + rnd, function (script_tag) { ok(typeof script_tag === "undefined", "should return undefined on error in 3 seconds"); ok(typeof require('http://yandex.ru/jquery.js' + rnd) === "undefined", "should not cache errorous modules"); require.js('module_as_string', function (module_as_string) { require.async('module_as_string', function (module_as_string_expected) { ok(module_as_string === module_as_string_expected, 'require.js() acts like require.async() if in-package/declared module passed'); start(); }); }); }); }); }); asyncTest("require.js() JSON callback and chain calls", function () { expect(2); var id = require('setTimeout')(function () { ok(false, 'JSONP call fails'); start(); }, 3000); require('window').someJsonHandler = function (result) { ok(result.ok, 'JSON called'); require('window').someJsonHandler = null; require('clearTimeout')(id); start(); }; var requireReturned = require.js('./modules/loader/non_lmd_module.jsonp.js' + rnd); ok(typeof requireReturned === "function", "require.js() must return require"); }); asyncTest("require.js() race calls", function () { expect(1); var result; var check_result = function (scriptTag) { <|fim▁hole|> } else { ok(result === scriptTag, "Must perform one call. 
Results must be the same"); start(); } }; require.js('./modules/loader_race/non_lmd_module.js' + rnd, check_result); require.js('./modules/loader_race/non_lmd_module.js' + rnd, check_result); }); asyncTest("require.js() shortcut", function () { expect(5); require.js('sk_js_js', function (script_tag) { ok(typeof script_tag === "object" && script_tag.nodeName.toUpperCase() === "SCRIPT", "should return script tag on success"); ok(require('sk_js_js') === script_tag, "require should return the same result"); require.js('sk_js_js', function (script_tag2) { ok(script_tag2 === script_tag, 'should load once'); ok(require('sk_js_js') === require('/modules/shortcuts/js.js'), "should be defined using path-to-module"); ok(typeof require('shortcuts_js') === "function", 'Should create a global function shortcuts_js as in module function'); start(); }) }); }); // -- CSS asyncTest("require.css()", function () { expect(4); require.css('./modules/loader/some_css.css' + rnd, function (link_tag) { ok(typeof link_tag === "object" && link_tag.nodeName.toUpperCase() === "LINK", "should return link tag on success"); ok(getComputedStyle(document.getElementById('qunit-fixture'), 'visibility') === "hidden", "css should be applied"); ok(require('./modules/loader/some_css.css' + rnd) === link_tag, "should cache link tag on success"); require.css('module_as_string', function (module_as_string) { require.async('module_as_string', function (module_as_string_expected) { ok(module_as_string === module_as_string_expected, 'require.css() acts like require.async() if in-package/declared module passed'); start(); }); }); }); }); asyncTest("require.css() CSS loader without callback", function () { expect(1); var requireReturned = require .css('./modules/loader/some_css_callbackless.css' + rnd) .css('./modules/loader/some_css_callbackless.css' + rnd + 1); ok(typeof requireReturned === "function", "require.css() must return require"); start(); }); asyncTest("require.css() race calls", function () { expect(1); var result; var check_result = function (linkTag) { if (typeof result === "undefined") { result = linkTag; } else { ok(result === linkTag, "Must perform one call. 
Results must be the same"); start(); } }; require.css('./modules/loader_race/some_css.css' + rnd, check_result); require.css('./modules/loader_race/some_css.css' + rnd, check_result); }); asyncTest("require.css() shortcut", function () { expect(4); require.css('sk_css_css', function (link_tag) { ok(typeof link_tag === "object" && link_tag.nodeName.toUpperCase() === "LINK", "should return link tag on success"); ok(require('sk_css_css') === link_tag, "require should return the same result"); require.css('sk_css_css', function (link_tag2) { ok(link_tag2 === link_tag, 'should load once'); ok(require('sk_css_css') === require('/modules/shortcuts/css.css'), "should be defined using path-to-module"); start(); }) }); }); asyncTest("require.css() cross origin", function () { expect(2); require.css('sk_css_xdomain', function (link_tag) { ok(typeof link_tag === "object" && link_tag.nodeName.toUpperCase() === "LINK", "should return link tag on success"); ok(getComputedStyle(document.body, 'min-width') === "960px", "css should be applied"); start(); }); }); // -- image asyncTest("require.image()", function () { expect(5); require.image('./modules/loader/image.gif' + rnd, function (img_tag) { ok(typeof img_tag === "object" && img_tag.nodeName.toUpperCase() === "IMG", "should return img tag on success"); ok(require('./modules/loader/image.gif' + rnd) === img_tag, "should cache img tag on success"); require.image('./modules/loader/image_404.gif' + rnd, function (img_tag) { ok(typeof img_tag === "undefined", "should return undefined on error in 3 seconds"); ok(typeof require('./modules/loader/image_404.gif' + rnd) === "undefined", "should not cache errorous modules"); require.image('module_as_string', function (module_as_string) { require.async('module_as_string', function (module_as_string_expected) { ok(module_as_string === module_as_string_expected, 'require.image() acts like require.async() if in-package/declared module passed'); start(); }); }); }); }); }); asyncTest("require.image() image loader without callback", function () { expect(1); var requireReturned = require .image('./modules/loader/image_callbackless.gif' + rnd) .image('./modules/loader/image_callbackless.gif' + rnd + 1); ok(typeof requireReturned === "function", "require.image() must return require"); start(); }); asyncTest("require.image() race calls", function () { expect(1); var result; var check_result = function (linkTag) { if (typeof result === "undefined") { result = linkTag; } else { ok(result === linkTag, "Must perform one call. Results must be the same"); start(); } }; require.image('./modules/loader_race/image.gif' + rnd, check_result); require.image('./modules/loader_race/image.gif' + rnd, check_result); }); asyncTest("require.image() shortcut", function () { expect(4); require.image('sk_image_image', function (img_tag) { ok(typeof img_tag === "object" && img_tag.nodeName.toUpperCase() === "IMG", "should return img tag on success"); ok(require('sk_image_image') === img_tag, "require should return the same result"); require.image('sk_image_image', function (img_tag2) { ok(img_tag2 === img_tag, 'should load once'); ok(require('sk_image_image') === require('/modules/shortcuts/image.gif'), "should be defined using path-to-module"); start(); }) }); }); })<|fim▁end|>
if (typeof result === "undefined") { result = scriptTag;
<|file_name|>entry.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (C) 2013-2014 Avencall # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> common = {} execfile_('common.py', common) MODELS = [ u'7906G', u'7911G',<|fim▁hole|> u'7961G', u'7962G', ] class CiscoSccpPlugin(common['BaseCiscoSccpPlugin']): IS_PLUGIN = True pg_associator = common['BaseCiscoPgAssociator'](MODELS)<|fim▁end|>
u'7931G', u'7941G', u'7942G',
<|file_name|>lithos_switch.rs<|end_file_name|><|fim▁begin|>extern crate libc; extern crate nix; extern crate env_logger; extern crate regex; extern crate argparse; extern crate quire; #[macro_use] extern crate log; extern crate lithos; use std::env; use std::io::{stderr, Read, Write}; use std::process::exit; use std::path::{Path, PathBuf}; use std::str::FromStr; use std::fs::{File}; use std::fs::{copy, rename}; use std::process::{Command, Stdio}; use argparse::{ArgumentParser, Parse, StoreTrue, Print}; use quire::{parse_config, Options}; use nix::sys::signal::{SIGQUIT, kill}; use nix::unistd::Pid; use lithos::master_config::MasterConfig; use lithos::sandbox_config::SandboxConfig; fn switch_config(master_cfg: &Path, sandbox_name: String, config_file: &Path) -> Result<(), String> { match Command::new(env::current_exe().unwrap() .parent().unwrap().join("lithos_check")) .stdin(Stdio::inherit()) .stdout(Stdio::inherit()) .stderr(Stdio::inherit()) .arg("--config") .arg(&master_cfg) .arg("--sandbox") .arg(&sandbox_name) .arg("--alternate-config") .arg(&config_file) .output() { Ok(ref po) if po.status.code() == Some(0) => { } Ok(ref po) => { return Err(format!( "Configuration check failed with exit status: {}", po.status)); } Err(e) => { return Err(format!("Can't check configuration: {}", e)); } } info!("Checked. Proceeding"); let master: MasterConfig = match parse_config(&master_cfg, &MasterConfig::validator(), &Options::default()) { Ok(cfg) => cfg, Err(e) => { return Err(format!("Can't parse master config: {}", e)); } }; let sandbox_fn = master_cfg.parent().unwrap() .join(&master.sandboxes_dir) .join(&(sandbox_name.clone() + ".yaml")); let sandbox: SandboxConfig = match parse_config(&sandbox_fn, &SandboxConfig::validator(), &Options::default()) { Ok(cfg) => cfg, Err(e) => { return Err(format!("Can't parse sandbox config: {}", e)); } }; let target_fn = master_cfg.parent().unwrap() .join(&master.processes_dir) .join(sandbox.config_file.as_ref().unwrap_or( &PathBuf::from(&(sandbox_name.clone() + ".yaml")))); debug!("Target filename {:?}", target_fn); let tmp_filename = target_fn.with_file_name( &format!(".tmp.{}", sandbox_name)); try!(copy(&config_file, &tmp_filename) .map_err(|e| format!("Error copying: {}", e))); try!(rename(&tmp_filename, &target_fn) .map_err(|e| format!("Error replacing file: {}", e))); info!("Done. Sending SIGQUIT to lithos_tree"); let pid_file = master.runtime_dir.join("master.pid"); let mut buf = String::with_capacity(50); let read_pid = File::open(&pid_file) .and_then(|mut f| f.read_to_string(&mut buf)) .ok() .and_then(|_| FromStr::from_str(buf[..].trim()).ok()) .map(Pid::from_raw); match read_pid { Some(pid) if kill(pid, None).is_ok() => { kill(pid, SIGQUIT) .map_err(|e| error!("Error sending QUIT to master: {:?}", e)).ok(); } Some(pid) => { warn!("Process with pid {} is not running...", pid); } None => { warn!("Can't read pid file {}. 
Probably daemon is not running.", pid_file.display()); } }; return Ok(()); } fn main() { if env::var("RUST_LOG").is_err() { env::set_var("RUST_LOG", "warn"); } env_logger::init(); let mut master_config = PathBuf::from("/etc/lithos/master.yaml"); let mut verbose = false; let mut config_file = PathBuf::from(""); let mut sandbox_name = "".to_string(); { let mut ap = ArgumentParser::new(); ap.set_description("Checks if lithos configuration is ok"); ap.refer(&mut master_config) .add_option(&["--master"], Parse, "Name of the master configuration file \<|fim▁hole|> .metavar("FILE"); ap.refer(&mut verbose) .add_option(&["-v", "--verbose"], StoreTrue, "Verbose configuration"); ap.refer(&mut sandbox_name) .add_argument("sandbox", Parse, "Name of the sandbox which configuration will be switched for") .required() .metavar("NAME"); ap.refer(&mut config_file) .add_argument("new_config", Parse, " Name of the process configuration file for this sandbox to switch to. The file is copied over current config after configuration is validated and just before sending a signal to lithos_tree.") .metavar("FILE") .required(); ap.add_option(&["--version"], Print(env!("CARGO_PKG_VERSION").to_string()), "Show version"); match ap.parse_args() { Ok(()) => {} Err(x) => { exit(x); } } } match switch_config(&master_config, sandbox_name, &config_file) { Ok(()) => { exit(0); } Err(e) => { write!(&mut stderr(), "Fatal error: {}\n", e).unwrap(); exit(1); } } }<|fim▁end|>
(default /etc/lithos/master.yaml)")
<|file_name|>test_list_image_filters.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import time from oslo_log import log as logging import six import testtools from tempest.api.compute import base from tempest.common.utils import data_utils from tempest.common import waiters from tempest import config from tempest import test CONF = config.CONF LOG = logging.getLogger(__name__) class ListImageFiltersTestJSON(base.BaseV2ComputeTest): @classmethod def skip_checks(cls): super(ListImageFiltersTestJSON, cls).skip_checks() if not CONF.service_available.glance: skip_msg = ("%s skipped as glance is not available" % cls.__name__) raise cls.skipException(skip_msg) @classmethod def setup_clients(cls): super(ListImageFiltersTestJSON, cls).setup_clients() cls.client = cls.images_client cls.glance_client = cls.os.image_client @classmethod def resource_setup(cls): super(ListImageFiltersTestJSON, cls).resource_setup() def _create_image(): name = data_utils.rand_name('image') body = cls.glance_client.create_image(name=name, container_format='bare', disk_format='raw', is_public=False)['image'] image_id = body['id'] cls.images.append(image_id) # Wait 1 second between creation and upload to ensure a delta # between created_at and updated_at. 
time.sleep(1) image_file = six.StringIO(('*' * 1024)) cls.glance_client.update_image(image_id, data=image_file) waiters.wait_for_image_status(cls.client, image_id, 'ACTIVE') body = cls.client.show_image(image_id)['image'] return body # Create non-snapshot images via glance cls.image1 = _create_image() cls.image1_id = cls.image1['id'] cls.image2 = _create_image() cls.image2_id = cls.image2['id'] cls.image3 = _create_image() cls.image3_id = cls.image3['id'] if not CONF.compute_feature_enabled.snapshot: return # Create instances and snapshots via nova cls.server1 = cls.create_test_server() cls.server2 = cls.create_test_server(wait_until='ACTIVE') # NOTE(sdague) this is faster than doing the sync wait_util on both waiters.wait_for_server_status(cls.servers_client, cls.server1['id'], 'ACTIVE') # Create images to be used in the filter tests cls.snapshot1 = cls.create_image_from_server( cls.server1['id'], wait_until='ACTIVE') cls.snapshot1_id = cls.snapshot1['id'] # Servers have a hidden property for when they are being imaged # Performing back-to-back create image calls on a single # server will sometimes cause failures cls.snapshot3 = cls.create_image_from_server( cls.server2['id'], wait_until='ACTIVE') cls.snapshot3_id = cls.snapshot3['id'] # Wait for the server to be active after the image upload cls.snapshot2 = cls.create_image_from_server( cls.server1['id'], wait_until='ACTIVE') cls.snapshot2_id = cls.snapshot2['id'] @test.idempotent_id('a3f5b513-aeb3-42a9-b18e-f091ef73254d') def test_list_images_filter_by_status(self): # The list of images should contain only images with the # provided status params = {'status': 'ACTIVE'} images = self.client.list_images(**params)['images'] self.assertTrue(any([i for i in images if i['id'] == self.image1_id])) self.assertTrue(any([i for i in images if i['id'] == self.image2_id])) self.assertTrue(any([i for i in images if i['id'] == self.image3_id])) @test.idempotent_id('33163b73-79f5-4d07-a7ea-9213bcc468ff') def test_list_images_filter_by_name(self): # List of all images should contain the expected images filtered # by name params = {'name': self.image1['name']} images = self.client.list_images(**params)['images'] self.assertTrue(any([i for i in images if i['id'] == self.image1_id])) self.assertFalse(any([i for i in images if i['id'] == self.image2_id])) self.assertFalse(any([i for i in images if i['id'] == self.image3_id])) @test.idempotent_id('9f238683-c763-45aa-b848-232ec3ce3105') @testtools.skipUnless(CONF.compute_feature_enabled.snapshot, 'Snapshotting is not available.') def test_list_images_filter_by_server_id(self): # The images should contain images filtered by server id params = {'server': self.server1['id']} images = self.client.list_images(**params)['images'] self.assertTrue(any([i for i in images if i['id'] == self.snapshot1_id]), "Failed to find image %s in images. 
Got images %s" % (self.image1_id, images)) self.assertTrue(any([i for i in images if i['id'] == self.snapshot2_id])) self.assertFalse(any([i for i in images if i['id'] == self.snapshot3_id])) @test.idempotent_id('05a377b8-28cf-4734-a1e6-2ab5c38bf606') @testtools.skipUnless(CONF.compute_feature_enabled.snapshot, 'Snapshotting is not available.') def test_list_images_filter_by_server_ref(self): # The list of servers should be filtered by server ref server_links = self.server2['links'] # Try all server link types for link in server_links: params = {'server': link['href']} images = self.client.list_images(**params)['images'] self.assertFalse(any([i for i in images if i['id'] == self.snapshot1_id])) self.assertFalse(any([i for i in images if i['id'] == self.snapshot2_id])) self.assertTrue(any([i for i in images if i['id'] == self.snapshot3_id])) @test.idempotent_id('e3356918-4d3e-4756-81d5-abc4524ba29f') @testtools.skipUnless(CONF.compute_feature_enabled.snapshot, 'Snapshotting is not available.') def test_list_images_filter_by_type(self): # The list of servers should be filtered by image type params = {'type': 'snapshot'} images = self.client.list_images(**params)['images'] self.assertTrue(any([i for i in images if i['id'] == self.snapshot1_id])) self.assertTrue(any([i for i in images if i['id'] == self.snapshot2_id])) self.assertTrue(any([i for i in images if i['id'] == self.snapshot3_id])) self.assertFalse(any([i for i in images if i['id'] == self.image_ref])) @test.idempotent_id('3a484ca9-67ba-451e-b494-7fcf28d32d62') def test_list_images_limit_results(self): # Verify only the expected number of results are returned params = {'limit': '1'} images = self.client.list_images(**params)['images'] self.assertEqual(1, len([x for x in images if 'id' in x])) @test.idempotent_id('18bac3ae-da27-436c-92a9-b22474d13aab') def test_list_images_filter_by_changes_since(self): # Verify only updated images are returned in the detailed list # Becoming ACTIVE will modify the updated time # Filter by the image's created time params = {'changes-since': self.image3['created']} images = self.client.list_images(**params)['images'] found = any([i for i in images if i['id'] == self.image3_id]) self.assertTrue(found) @test.idempotent_id('9b0ea018-6185-4f71-948a-a123a107988e') def test_list_images_with_detail_filter_by_status(self): # Detailed list of all images should only contain images # with the provided status params = {'status': 'ACTIVE'} images = self.client.list_images(detail=True, **params)['images'] self.assertTrue(any([i for i in images if i['id'] == self.image1_id])) self.assertTrue(any([i for i in images if i['id'] == self.image2_id])) self.assertTrue(any([i for i in images if i['id'] == self.image3_id])) @test.idempotent_id('644ea267-9bd9-4f3b-af9f-dffa02396a17') def test_list_images_with_detail_filter_by_name(self): # Detailed list of all images should contain the expected # images filtered by name params = {'name': self.image1['name']} images = self.client.list_images(detail=True, **params)['images'] self.assertTrue(any([i for i in images if i['id'] == self.image1_id])) self.assertFalse(any([i for i in images if i['id'] == self.image2_id])) self.assertFalse(any([i for i in images if i['id'] == self.image3_id])) @test.idempotent_id('ba2fa9a9-b672-47cc-b354-3b4c0600e2cb') def test_list_images_with_detail_limit_results(self): # Verify only the expected number of results (with full details) # are returned params = {'limit': '1'} images = self.client.list_images(detail=True, **params)['images'] 
self.assertEqual(1, len(images)) @test.idempotent_id('8c78f822-203b-4bf6-8bba-56ebd551cf84') @testtools.skipUnless(CONF.compute_feature_enabled.snapshot, 'Snapshotting is not available.') def test_list_images_with_detail_filter_by_server_ref(self): # Detailed list of servers should be filtered by server ref server_links = self.server2['links'] # Try all server link types for link in server_links: params = {'server': link['href']} images = self.client.list_images(detail=True, **params)['images'] <|fim▁hole|> self.assertTrue(any([i for i in images if i['id'] == self.snapshot3_id])) @test.idempotent_id('888c0cc0-7223-43c5-9db0-b125fd0a393b') @testtools.skipUnless(CONF.compute_feature_enabled.snapshot, 'Snapshotting is not available.') def test_list_images_with_detail_filter_by_type(self): # The detailed list of servers should be filtered by image type params = {'type': 'snapshot'} images = self.client.list_images(detail=True, **params)['images'] self.client.show_image(self.image_ref) self.assertTrue(any([i for i in images if i['id'] == self.snapshot1_id])) self.assertTrue(any([i for i in images if i['id'] == self.snapshot2_id])) self.assertTrue(any([i for i in images if i['id'] == self.snapshot3_id])) self.assertFalse(any([i for i in images if i['id'] == self.image_ref])) @test.idempotent_id('7d439e18-ac2e-4827-b049-7e18004712c4') def test_list_images_with_detail_filter_by_changes_since(self): # Verify an update image is returned # Becoming ACTIVE will modify the updated time # Filter by the image's created time params = {'changes-since': self.image1['created']} images = self.client.list_images(detail=True, **params)['images'] self.assertTrue(any([i for i in images if i['id'] == self.image1_id]))<|fim▁end|>
self.assertFalse(any([i for i in images if i['id'] == self.snapshot1_id])) self.assertFalse(any([i for i in images if i['id'] == self.snapshot2_id]))
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* main.rs */ #[crate_id = "main#0.1"]; #[comment = "ironkernel"]; #[license = "MIT"]; #[crate_type = "lib"]; // Forked from pczarn/rustboot #[no_std]; #[feature(asm, globs, macro_rules)]; extern mod core; #[cfg(target_arch = "arm")] pub use support::{memcpy, memmove}; <|fim▁hole|> #[cfg(target_arch = "arm")] #[path = "rust-core/support.rs"] mod support; #[cfg(target_arch = "arm")] #[path = "arch/arm/"] mod platform { pub mod cpu; pub mod io; pub mod drivers; }<|fim▁end|>
use platform::{cpu, io}; pub mod kernel;
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import include, url from sapl.sessao.views import (AdicionarVariasMateriasExpediente, AdicionarVariasMateriasOrdemDia, BancadaCrud, CargoBancadaCrud, ExpedienteMateriaCrud, ExpedienteView, JustificativaAusenciaCrud, OcorrenciaSessaoView, ConsideracoesFinaisView, MateriaOrdemDiaCrud, OradorOrdemDiaCrud, MesaView, OradorCrud, OradorExpedienteCrud, PainelView, PautaSessaoDetailView, PautaSessaoView, PesquisarPautaSessaoView, PesquisarSessaoPlenariaView, PresencaOrdemDiaView, PresencaView, ResumoOrdenacaoView, ResumoView, ResumoAtaView, RetiradaPautaCrud, SessaoCrud, TipoJustificativaCrud, TipoExpedienteCrud, TipoResultadoVotacaoCrud, TipoExpedienteCrud, TipoResultadoVotacaoCrud, TipoRetiradaPautaCrud, TipoSessaoCrud, VotacaoEditView, VotacaoExpedienteEditView, VotacaoExpedienteView, VotacaoNominalEditView, VotacaoNominalExpedienteDetailView, VotacaoNominalExpedienteEditView, VotacaoNominalExpedienteView, VotacaoNominalTransparenciaDetailView, VotacaoSimbolicaTransparenciaDetailView, VotacaoNominalView, VotacaoView, abrir_votacao, atualizar_mesa, insere_parlamentar_composicao, mudar_ordem_materia_sessao, recuperar_materia, recuperar_numero_sessao_view, remove_parlamentar_composicao, reordena_materias, sessao_legislativa_legislatura_ajax, VotacaoEmBlocoOrdemDia, VotacaoEmBlocoExpediente, VotacaoEmBlocoSimbolicaView, VotacaoEmBlocoNominalView, recuperar_nome_tipo_sessao, ExpedienteLeituraView, OrdemDiaLeituraView, retirar_leitura, TransferenciaMateriasExpediente, TransferenciaMateriasOrdemDia, filtra_materias_copia_sessao_ajax, verifica_materia_sessao_plenaria_ajax) from .apps import AppConfig app_name = AppConfig.name urlpatterns = [ url(r'^sessao/', include(SessaoCrud.get_urls() + OradorCrud.get_urls() + OradorExpedienteCrud.get_urls() + ExpedienteMateriaCrud.get_urls() + JustificativaAusenciaCrud.get_urls() + MateriaOrdemDiaCrud.get_urls() + OradorOrdemDiaCrud.get_urls() + RetiradaPautaCrud.get_urls())), url(r'^sessao/(?P<pk>\d+)/mesa$', MesaView.as_view(), name='mesa'), url(r'^sessao/mesa/atualizar-mesa/$', atualizar_mesa, name='atualizar_mesa'), url(r'^sessao/mesa/insere-parlamentar/composicao/$', insere_parlamentar_composicao, name='insere_parlamentar_composicao'), url(r'^sessao/mesa/remove-parlamentar-composicao/$', remove_parlamentar_composicao, name='remove_parlamentar_composicao'), url(r'^sessao/recuperar-materia/', recuperar_materia), url(r'^sessao/recuperar-numero-sessao/', recuperar_numero_sessao_view, name='recuperar_numero_sessao_view' ), url(r'^sessao/recuperar-nome-tipo-sessao/', recuperar_nome_tipo_sessao, name='recuperar_nome_tipo_sessao'), url(r'^sessao/sessao-legislativa-legislatura-ajax/', sessao_legislativa_legislatura_ajax, name='sessao_legislativa_legislatura_ajax_view'), url(r'^sessao/filtra-materias-copia-sessao-ajax/', filtra_materias_copia_sessao_ajax, name='filtra_materias_copia_sessao_ajax_view'), url(r'^sessao/verifica-materia-sessao-plenaria-ajax/', verifica_materia_sessao_plenaria_ajax, name='verifica_materia_sessao_plenaria_ajax_view'), url(r'^sessao/(?P<pk>\d+)/(?P<spk>\d+)/abrir-votacao$', abrir_votacao, name="abrir_votacao"), url(r'^sessao/(?P<pk>\d+)/reordena/(?P<tipo>[\w\-]+)/(?P<ordenacao>\d+)/$', reordena_materias, name="reordena_materias"), <|fim▁hole|> url(r'^sistema/sessao-plenaria/tipo-expediente/', include(TipoExpedienteCrud.get_urls())), url(r'^sistema/sessao-plenaria/tipo-justificativa/', include(TipoJustificativaCrud.get_urls())), 
url(r'^sistema/sessao-plenaria/tipo-retirada-pauta/', include(TipoRetiradaPautaCrud.get_urls())), url(r'^sistema/bancada/', include(BancadaCrud.get_urls())), url(r'^sistema/cargo-bancada/', include(CargoBancadaCrud.get_urls())), url(r'^sistema/resumo-ordenacao/', ResumoOrdenacaoView.as_view(), name='resumo_ordenacao'), url(r'^sessao/(?P<pk>\d+)/adicionar-varias-materias-expediente/', AdicionarVariasMateriasExpediente.as_view(), name='adicionar_varias_materias_expediente'), url(r'^sessao/(?P<pk>\d+)/adicionar-varias-materias-ordem-dia/', AdicionarVariasMateriasOrdemDia.as_view(), name='adicionar_varias_materias_ordem_dia'), # PAUTA SESSÃO url(r'^sessao/pauta-sessao$', PautaSessaoView.as_view(), name='pauta_sessao'), url(r'^sessao/pauta-sessao/pesquisar-pauta$', PesquisarPautaSessaoView.as_view(), name='pesquisar_pauta'), url(r'^sessao/pauta-sessao/(?P<pk>\d+)/(?:pdf)?$', PautaSessaoDetailView.as_view(), name='pauta_sessao_detail'), # Subnav sessão url(r'^sessao/(?P<pk>\d+)/expediente$', ExpedienteView.as_view(), name='expediente'), url(r'^sessao/(?P<pk>\d+)/ocorrencia_sessao$', OcorrenciaSessaoView.as_view(), name='ocorrencia_sessao'), url(r'^sessao/(?P<pk>\d+)/consideracoes_finais$', ConsideracoesFinaisView.as_view(), name='consideracoes_finais'), url(r'^sessao/(?P<pk>\d+)/presenca$', PresencaView.as_view(), name='presenca'), url(r'^sessao/(?P<pk>\d+)/painel$', PainelView.as_view(), name='painel'), url(r'^sessao/(?P<pk>\d+)/presencaordemdia$', PresencaOrdemDiaView.as_view(), name='presencaordemdia'), url(r'^sessao/(?P<pk>\d+)/votacao_bloco_ordemdia$', VotacaoEmBlocoOrdemDia.as_view(), name='votacao_bloco_ordemdia'), url(r'^sessao/(?P<pk>\d+)/votacao_bloco/votnom$', VotacaoEmBlocoNominalView.as_view(), name='votacaobloconom'), url(r'^sessao/(?P<pk>\d+)/votacao_bloco/votsimb$', VotacaoEmBlocoSimbolicaView.as_view(), name='votacaoblocosimb'), url(r'^sessao/(?P<pk>\d+)/votacao_bloco_expediente$', VotacaoEmBlocoExpediente.as_view(), name='votacao_bloco_expediente'), url(r'^sessao/(?P<pk>\d+)/resumo$', ResumoView.as_view(), name='resumo'), url(r'^sessao/(?P<pk>\d+)/resumo_ata$', ResumoAtaView.as_view(), name='resumo_ata'), url(r'^sessao/pesquisar-sessao$', PesquisarSessaoPlenariaView.as_view(), name='pesquisar_sessao'), url(r'^sessao/(?P<pk>\d+)/matordemdia/votnom/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoNominalView.as_view(), name='votacaonominal'), url(r'^sessao/(?P<pk>\d+)/matordemdia/votnom/edit/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoNominalEditView.as_view(), name='votacaonominaledit'), url(r'^sessao/(?P<pk>\d+)/matordemdia/votsec/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoView.as_view(), name='votacaosecreta'), url(r'^sessao/(?P<pk>\d+)/matordemdia/votsec/view/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoEditView.as_view(), name='votacaosecretaedit'), url(r'^sessao/(?P<pk>\d+)/matordemdia/votsimb/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoView.as_view(), name='votacaosimbolica'), url(r'^sessao/(?P<pk>\d+)/matordemdia/votsimbbloco/$', VotacaoView.as_view(), name='votacaosimbolicabloco'), url(r'^sessao/(?P<pk>\d+)/matordemdia/votsimb/view/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoEditView.as_view(), name='votacaosimbolicaedit'), url(r'^sessao/(?P<pk>\d+)/matexp/votnom/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoNominalExpedienteView.as_view(), name='votacaonominalexp'), url(r'^sessao/(?P<pk>\d+)/matexp/votnom/edit/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoNominalExpedienteEditView.as_view(), name='votacaonominalexpedit'), url(r'^sessao/(?P<pk>\d+)/matexp/votnom/detail/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoNominalExpedienteDetailView.as_view(), 
name='votacaonominalexpdetail'), url(r'^sessao/(?P<pk>\d+)/matexp/votsimb/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoExpedienteView.as_view(), name='votacaosimbolicaexp'), url(r'^sessao/(?P<pk>\d+)/matexp/votsimb/view/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoExpedienteEditView.as_view(), name='votacaosimbolicaexpedit'), url(r'^sessao/(?P<pk>\d+)/matexp/votsec/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoExpedienteView.as_view(), name='votacaosecretaexp'), url(r'^sessao/(?P<pk>\d+)/matexp/votsec/view/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoExpedienteEditView.as_view(), name='votacaosecretaexpedit'), url(r'^sessao/(?P<pk>\d+)/votacao-nominal-transparencia/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoNominalTransparenciaDetailView.as_view(), name='votacao_nominal_transparencia'), url(r'^sessao/(?P<pk>\d+)/votacao-simbolica-transparencia/(?P<oid>\d+)/(?P<mid>\d+)$', VotacaoSimbolicaTransparenciaDetailView.as_view(), name='votacao_simbolica_transparencia'), url(r'^sessao/mudar-ordem-materia-sessao/', mudar_ordem_materia_sessao, name='mudar_ordem_materia_sessao'), url(r'^sessao/(?P<pk>\d+)/matexp/leitura/(?P<oid>\d+)/(?P<mid>\d+)$', ExpedienteLeituraView.as_view(), name='leituraexp'), url(r'^sessao/(?P<pk>\d+)/matordemdia/leitura/(?P<oid>\d+)/(?P<mid>\d+)$', OrdemDiaLeituraView.as_view(), name='leituraod'), url(r'^sessao/(?P<pk>\d+)/(?P<iso>\d+)/(?P<oid>\d+)/retirar-leitura$', retirar_leitura, name='retirar_leitura'), url(r'^sessao/(?P<pk>\d+)/transf-mat-exp$', TransferenciaMateriasExpediente.as_view(), name="transf_mat_exp"), url(r'^sessao/(?P<pk>\d+)/transf-mat-ordemdia$', TransferenciaMateriasOrdemDia.as_view(), name="transf_mat_ordemdia"), ]<|fim▁end|>
url(r'^sistema/sessao-plenaria/tipo/', include(TipoSessaoCrud.get_urls())), url(r'^sistema/sessao-plenaria/tipo-resultado-votacao/', include(TipoResultadoVotacaoCrud.get_urls())),
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory,<|fim▁hole|># add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) # -- Project information ----------------------------------------------------- project = 'msgiver' copyright = '2018, Tatsunori Nishikori' author = 'Tatsunori Nishikori' # The short X.Y version version = '0.1' # The full version, including alpha/beta/rc tags release = '0.1.7.1' # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.githubpages', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . exclude_patterns = ['build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. 
htmlhelp_basename = 'msgiverdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'msgiver.tex', 'msgiver Documentation', 'Tatsunori Nishikori', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'msgiver', 'msgiver Documentation', [author], 1) ] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'msgiver', 'msgiver Documentation', author, 'msgiver', 'One line description of project.', 'Miscellaneous'), ]<|fim▁end|>
<|file_name|>chatsService.js<|end_file_name|><|fim▁begin|>appService.factory('Chats', function() { // Might use a resource here that returns a JSON array // Some fake testing data var chats = [{ id: 0, <|fim▁hole|> name: 'Ben Sparrow', lastText: 'You on your way?', face: 'app/view/common/img/ben.png' }, { id: 1, name: 'Max Lynx', lastText: 'Hey, it\'s me', face: 'app/view/common/img/max.png' }, { id: 2, name: 'Adam Bradleyson', lastText: 'I should buy a boat', face: 'app/view/common/img/adam.jpg' }, { id: 3, name: 'Perry Governor', lastText: 'Look at my mukluks!', face: 'app/view/common/img/perry.png' }, { id: 4, name: 'Mike Harrington', lastText: 'This is wicked good ice cream.', face: 'app/view/common/img/mike.png' }]; return { all: function() { return chats; }, remove: function(chat) { chats.splice(chats.indexOf(chat), 1); }, get: function(chatId) { for (var i = 0; i < chats.length; i++) { if (chats[i].id === parseInt(chatId)) { return chats[i]; } } return null; } }; });<|fim▁end|>
<|file_name|>instr_vfmadd132ss.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn vfmadd132ss_1() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM6)), operand3: Some(Direct(XMM1)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 73, 153, 241], OperandSize::Dword) } #[test] fn vfmadd132ss_2() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectScaledDisplaced(EDX, Four, 352493850, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 105, 153, 12, 149, 26, 161, 2, 21], OperandSize::Dword) } #[test]<|fim▁hole|>} #[test] fn vfmadd132ss_4() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectDisplaced(RCX, 1655863941, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 73, 153, 137, 133, 122, 178, 98], OperandSize::Qword) } #[test] fn vfmadd132ss_5() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM6)), operand3: Some(Direct(XMM0)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Nearest), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 242, 77, 156, 153, 200], OperandSize::Dword) } #[test] fn vfmadd132ss_6() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM0)), operand3: Some(IndirectScaledIndexed(ECX, EDI, Four, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 242, 125, 138, 153, 60, 185], OperandSize::Dword) } #[test] fn vfmadd132ss_7() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM20)), operand3: Some(Direct(XMM19)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Zero), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 178, 93, 242, 153, 243], OperandSize::Qword) } #[test] fn vfmadd132ss_8() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM17)), operand2: Some(Direct(XMM4)), operand3: Some(Indirect(RBX, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 226, 93, 143, 153, 11], OperandSize::Qword) }<|fim▁end|>
fn vfmadd132ss_3() { run_test(&Instruction { mnemonic: Mnemonic::VFMADD132SS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM7)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 65, 153, 196], OperandSize::Qword)
<|file_name|>preamble_patcher.cc<|end_file_name|><|fim▁begin|>// -*- Mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- /* Copyright (c) 2007, Google Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * --- * Author: Joi Sigurdsson * Author: Scott Francis * * Implementation of PreamblePatcher */ #include "preamble_patcher.h" #include "mini_disassembler.h" // compatibility shims #include "base/logging.h" // Definitions of assembly statements we need #define ASM_JMP32REL 0xE9 #define ASM_INT3 0xCC #define ASM_JMP32ABS_0 0xFF #define ASM_JMP32ABS_1 0x25 #define ASM_JMP8REL 0xEB #define ASM_JCC32REL_0 0x0F #define ASM_JCC32REL_1_MASK 0x80 #define ASM_NOP 0x90 // X64 opcodes #define ASM_REXW 0x48 #define ASM_MOVRAX_IMM 0xB8 #define ASM_JMP 0xFF #define ASM_JMP_RAX 0xE0 namespace sidestep { PreamblePatcher::PreamblePage* PreamblePatcher::preamble_pages_ = NULL; long PreamblePatcher::granularity_ = 0; long PreamblePatcher::pagesize_ = 0; bool PreamblePatcher::initialized_ = false; static const unsigned int kPreamblePageMagic = 0x4347414D; // "MAGC" // Handle a special case that we see with functions that point into an // IAT table (including functions linked statically into the // application): these function already starts with ASM_JMP32*. For // instance, malloc() might be implemented as a JMP to __malloc(). // This function follows the initial JMPs for us, until we get to the // place where the actual code is defined. If we get to STOP_BEFORE, // we return the address before stop_before. The stop_before_trampoline // flag is used in 64-bit mode. If true, we will return the address // before a trampoline is detected. Trampolines are defined as: // // nop // mov rax, <replacement_function> // jmp rax // // See PreamblePatcher::RawPatchWithStub for more information. 
void* PreamblePatcher::ResolveTargetImpl(unsigned char* target, unsigned char* stop_before, bool stop_before_trampoline) { if (target == NULL) return NULL; while (1) { unsigned char* new_target; if (target[0] == ASM_JMP32REL) { // target[1-4] holds the place the jmp goes to, but it's // relative to the next instruction. int relative_offset; // Windows guarantees int is 4 bytes SIDESTEP_ASSERT(sizeof(relative_offset) == 4); memcpy(reinterpret_cast<void*>(&relative_offset), reinterpret_cast<void*>(target + 1), 4); new_target = target + 5 + relative_offset; } else if (target[0] == ASM_JMP8REL) { // Visual Studio 7.1 implements new[] as an 8 bit jump to new signed char relative_offset; memcpy(reinterpret_cast<void*>(&relative_offset), reinterpret_cast<void*>(target + 1), 1); new_target = target + 2 + relative_offset; } else if (target[0] == ASM_JMP32ABS_0 && target[1] == ASM_JMP32ABS_1) { jmp32rel: // Visual studio seems to sometimes do it this way instead of the // previous way. Not sure what the rules are, but it was happening // with operator new in some binaries. void** new_target_v; if (kIs64BitBinary) { // In 64-bit mode JMPs are RIP-relative, not absolute int target_offset; memcpy(reinterpret_cast<void*>(&target_offset), reinterpret_cast<void*>(target + 2), 4); new_target_v = reinterpret_cast<void**>(target + target_offset + 6); } else { SIDESTEP_ASSERT(sizeof(new_target) == 4); memcpy(&new_target_v, reinterpret_cast<void*>(target + 2), 4); } new_target = reinterpret_cast<unsigned char*>(*new_target_v); } else if (kIs64BitBinary && target[0] == ASM_REXW && target[1] == ASM_JMP32ABS_0 && target[2] == ASM_JMP32ABS_1) { // in Visual Studio 2012 we're seeing jump like that: // rex.W jmpq *0x11d019(%rip) // // according to docs I have, rex prefix is actually unneeded and // can be ignored. I.e. docs say for jumps like that operand // already defaults to 64-bit. But clearly it breaks abs. jump // detection above and we just skip rex target++; goto jmp32rel; } else { break; } if (new_target == stop_before) break; if (stop_before_trampoline && *new_target == ASM_NOP && new_target[1] == ASM_REXW && new_target[2] == ASM_MOVRAX_IMM) break; target = new_target; } return target; } // Special case scoped_ptr to avoid dependency on scoped_ptr below. class DeleteUnsignedCharArray { public: DeleteUnsignedCharArray(unsigned char* array) : array_(array) { } ~DeleteUnsignedCharArray() { if (array_) { PreamblePatcher::FreePreambleBlock(array_); } } unsigned char* Release() { unsigned char* temp = array_; array_ = NULL; return temp; } private: unsigned char* array_; }; SideStepError PreamblePatcher::RawPatchWithStubAndProtections( void* target_function, void *replacement_function, unsigned char* preamble_stub, unsigned long stub_size, unsigned long* bytes_needed) { // We need to be able to write to a process-local copy of the first // MAX_PREAMBLE_STUB_SIZE bytes of target_function DWORD old_target_function_protect = 0; BOOL succeeded = ::VirtualProtect(reinterpret_cast<void*>(target_function), MAX_PREAMBLE_STUB_SIZE, PAGE_EXECUTE_READWRITE, &old_target_function_protect); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to make page containing target function " "copy-on-write."); return SIDESTEP_ACCESS_DENIED; } SideStepError error_code = RawPatchWithStub(target_function, replacement_function, preamble_stub, stub_size, bytes_needed); // Restore the protection of the first MAX_PREAMBLE_STUB_SIZE bytes of // pTargetFunction to what they were before we started goofing around. 
// We do this regardless of whether the patch succeeded or not. succeeded = ::VirtualProtect(reinterpret_cast<void*>(target_function), MAX_PREAMBLE_STUB_SIZE, old_target_function_protect, &old_target_function_protect); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to restore protection to target function."); // We must not return an error here because the function has // likely actually been patched, and returning an error might // cause our client code not to unpatch it. So we just keep // going. } if (SIDESTEP_SUCCESS != error_code) { // Testing RawPatchWithStub, above SIDESTEP_ASSERT(false); return error_code; } // Flush the instruction cache to make sure the processor doesn't execute the // old version of the instructions (before our patch). // // FlushInstructionCache is actually a no-op at least on // single-processor XP machines. I'm not sure why this is so, but // it is, yet I want to keep the call to the API here for // correctness in case there is a difference in some variants of // Windows/hardware. succeeded = ::FlushInstructionCache(::GetCurrentProcess(), target_function, MAX_PREAMBLE_STUB_SIZE); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to flush instruction cache."); // We must not return an error here because the function has actually // been patched, and returning an error would likely cause our client // code not to unpatch it. So we just keep going. } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::RawPatch(void* target_function, void* replacement_function, void** original_function_stub) { if (!target_function || !replacement_function || !original_function_stub || (*original_function_stub) || target_function == replacement_function) { SIDESTEP_ASSERT(false && "Preconditions not met"); return SIDESTEP_INVALID_PARAMETER; } BOOL succeeded = FALSE; // First, deal with a special case that we see with functions that // point into an IAT table (including functions linked statically // into the application): these function already starts with // ASM_JMP32REL. For instance, malloc() might be implemented as a // JMP to __malloc(). In that case, we replace the destination of // the JMP (__malloc), rather than the JMP itself (malloc). This // way we get the correct behavior no matter how malloc gets called. void* new_target = ResolveTarget(target_function); if (new_target != target_function) { target_function = new_target; } // In 64-bit mode, preamble_stub must be within 2GB of target function // so that if target contains a jump, we can translate it. unsigned char* preamble_stub = AllocPreambleBlockNear(target_function); if (!preamble_stub) { SIDESTEP_ASSERT(false && "Unable to allocate preamble-stub."); return SIDESTEP_INSUFFICIENT_BUFFER; } // Frees the array at end of scope. DeleteUnsignedCharArray guard_preamble_stub(preamble_stub); SideStepError error_code = RawPatchWithStubAndProtections( target_function, replacement_function, preamble_stub, MAX_PREAMBLE_STUB_SIZE, NULL); if (SIDESTEP_SUCCESS != error_code) { SIDESTEP_ASSERT(false); return error_code; } // Flush the instruction cache to make sure the processor doesn't execute the // old version of the instructions (before our patch). // // FlushInstructionCache is actually a no-op at least on // single-processor XP machines. I'm not sure why this is so, but // it is, yet I want to keep the call to the API here for // correctness in case there is a difference in some variants of // Windows/hardware. 
succeeded = ::FlushInstructionCache(::GetCurrentProcess(), target_function, MAX_PREAMBLE_STUB_SIZE); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to flush instruction cache."); // We must not return an error here because the function has actually // been patched, and returning an error would likely cause our client // code not to unpatch it. So we just keep going. } SIDESTEP_LOG("PreamblePatcher::RawPatch successfully patched."); // detach the scoped pointer so the memory is not freed *original_function_stub = reinterpret_cast<void*>(guard_preamble_stub.Release()); return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::Unpatch(void* target_function, void* replacement_function, void* original_function_stub) { SIDESTEP_ASSERT(target_function && replacement_function && original_function_stub); if (!target_function || !replacement_function || !original_function_stub) { return SIDESTEP_INVALID_PARAMETER; } // Before unpatching, target_function should be a JMP to // replacement_function. If it's not, then either it's an error, or // we're falling into the case where the original instruction was a // JMP, and we patched the jumped_to address rather than the JMP // itself. (For instance, if malloc() is just a JMP to __malloc(), // we patched __malloc() and not malloc().) unsigned char* target = reinterpret_cast<unsigned char*>(target_function); target = reinterpret_cast<unsigned char*>( ResolveTargetImpl( target, reinterpret_cast<unsigned char*>(replacement_function), true)); // We should end at the function we patched. When we patch, we insert // a ASM_JMP32REL instruction, so look for that as a sanity check. if (target[0] != ASM_JMP32REL) { SIDESTEP_ASSERT(false && "target_function does not look like it was patched."); return SIDESTEP_INVALID_PARAMETER; } const unsigned int kRequiredTargetPatchBytes = 5; // We need to be able to write to a process-local copy of the first // kRequiredTargetPatchBytes bytes of target_function DWORD old_target_function_protect = 0; BOOL succeeded = ::VirtualProtect(reinterpret_cast<void*>(target), kRequiredTargetPatchBytes, PAGE_EXECUTE_READWRITE, &old_target_function_protect); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to make page containing target function " "copy-on-write."); return SIDESTEP_ACCESS_DENIED; } unsigned char* preamble_stub = reinterpret_cast<unsigned char*>( original_function_stub); // Disassemble the preamble of stub and copy the bytes back to target. // If we've done any conditional jumps in the preamble we need to convert // them back to the original REL8 jumps in the target. 
MiniDisassembler disassembler; unsigned int preamble_bytes = 0; unsigned int target_bytes = 0; while (target_bytes < kRequiredTargetPatchBytes) { unsigned int cur_bytes = 0; InstructionType instruction_type = disassembler.Disassemble(preamble_stub + preamble_bytes, cur_bytes); if (IT_JUMP == instruction_type) { unsigned int jump_bytes = 0; SideStepError jump_ret = SIDESTEP_JUMP_INSTRUCTION; if (IsNearConditionalJump(preamble_stub + preamble_bytes, cur_bytes) || IsNearRelativeJump(preamble_stub + preamble_bytes, cur_bytes) || IsNearAbsoluteCall(preamble_stub + preamble_bytes, cur_bytes) || IsNearRelativeCall(preamble_stub + preamble_bytes, cur_bytes)) { jump_ret = PatchNearJumpOrCall(preamble_stub + preamble_bytes, cur_bytes, target + target_bytes, &jump_bytes, MAX_PREAMBLE_STUB_SIZE); } if (jump_ret == SIDESTEP_JUMP_INSTRUCTION) { SIDESTEP_ASSERT(false && "Found unsupported jump instruction in stub!!"); return SIDESTEP_UNSUPPORTED_INSTRUCTION; } target_bytes += jump_bytes; } else if (IT_GENERIC == instruction_type) { if (IsMovWithDisplacement(preamble_stub + preamble_bytes, cur_bytes)) { unsigned int mov_bytes = 0; if (PatchMovWithDisplacement(preamble_stub + preamble_bytes, cur_bytes, target + target_bytes, &mov_bytes, MAX_PREAMBLE_STUB_SIZE) != SIDESTEP_SUCCESS) { SIDESTEP_ASSERT(false && "Found unsupported generic instruction in stub!!"); return SIDESTEP_UNSUPPORTED_INSTRUCTION; } } else { memcpy(reinterpret_cast<void*>(target + target_bytes), reinterpret_cast<void*>(reinterpret_cast<unsigned char*>( original_function_stub) + preamble_bytes), cur_bytes); target_bytes += cur_bytes; } } else { SIDESTEP_ASSERT(false && "Found unsupported instruction in stub!!"); return SIDESTEP_UNSUPPORTED_INSTRUCTION; } preamble_bytes += cur_bytes; } FreePreambleBlock(reinterpret_cast<unsigned char*>(original_function_stub)); // Restore the protection of the first kRequiredTargetPatchBytes bytes of // target to what they were before we started goofing around. succeeded = ::VirtualProtect(reinterpret_cast<void*>(target), kRequiredTargetPatchBytes, old_target_function_protect, &old_target_function_protect); // Flush the instruction cache to make sure the processor doesn't execute the // old version of the instructions (before our patch). // // See comment on FlushInstructionCache elsewhere in this file. succeeded = ::FlushInstructionCache(::GetCurrentProcess(), target, MAX_PREAMBLE_STUB_SIZE); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to flush instruction cache."); return SIDESTEP_UNEXPECTED; } SIDESTEP_LOG("PreamblePatcher::Unpatch successfully unpatched."); return SIDESTEP_SUCCESS; } void PreamblePatcher::Initialize() { if (!initialized_) { SYSTEM_INFO si = { 0 }; ::GetSystemInfo(&si); granularity_ = si.dwAllocationGranularity; pagesize_ = si.dwPageSize; initialized_ = true; } } unsigned char* PreamblePatcher::AllocPreambleBlockNear(void* target) { PreamblePage* preamble_page = preamble_pages_; while (preamble_page != NULL) { if (preamble_page->free_ != NULL) { __int64 val = reinterpret_cast<__int64>(preamble_page) - reinterpret_cast<__int64>(target); if ((val > 0 && val + pagesize_ <= INT_MAX) || (val < 0 && val >= INT_MIN)) { break; } } preamble_page = preamble_page->next_; } // The free_ member of the page is used to store the next available block // of memory to use or NULL if there are no chunks available, in which case // we'll allocate a new page. 
if (preamble_page == NULL || preamble_page->free_ == NULL) { // Create a new preamble page and initialize the free list preamble_page = reinterpret_cast<PreamblePage*>(AllocPageNear(target)); SIDESTEP_ASSERT(preamble_page != NULL && "Could not allocate page!"); void** pp = &preamble_page->free_; unsigned char* ptr = reinterpret_cast<unsigned char*>(preamble_page) + MAX_PREAMBLE_STUB_SIZE; unsigned char* limit = reinterpret_cast<unsigned char*>(preamble_page) + pagesize_; while (ptr < limit) { *pp = ptr; pp = reinterpret_cast<void**>(ptr); ptr += MAX_PREAMBLE_STUB_SIZE; } *pp = NULL; // Insert the new page into the list preamble_page->magic_ = kPreamblePageMagic; preamble_page->next_ = preamble_pages_; preamble_pages_ = preamble_page; } unsigned char* ret = reinterpret_cast<unsigned char*>(preamble_page->free_); preamble_page->free_ = *(reinterpret_cast<void**>(preamble_page->free_)); return ret; } void PreamblePatcher::FreePreambleBlock(unsigned char* block) { SIDESTEP_ASSERT(block != NULL); SIDESTEP_ASSERT(granularity_ != 0); uintptr_t ptr = reinterpret_cast<uintptr_t>(block); ptr -= ptr & (granularity_ - 1); PreamblePage* preamble_page = reinterpret_cast<PreamblePage*>(ptr); SIDESTEP_ASSERT(preamble_page->magic_ == kPreamblePageMagic); *(reinterpret_cast<void**>(block)) = preamble_page->free_; preamble_page->free_ = block; } void* PreamblePatcher::AllocPageNear(void* target) { MEMORY_BASIC_INFORMATION mbi = { 0 }; if (!::VirtualQuery(target, &mbi, sizeof(mbi))) { SIDESTEP_ASSERT(false && "VirtualQuery failed on target address"); return 0; } if (initialized_ == false) { PreamblePatcher::Initialize(); SIDESTEP_ASSERT(initialized_); } void* pv = NULL; unsigned char* allocation_base = reinterpret_cast<unsigned char*>( mbi.AllocationBase); __int64 i = 1; bool high_target = reinterpret_cast<__int64>(target) > UINT_MAX; while (pv == NULL) { __int64 val = reinterpret_cast<__int64>(allocation_base) - (i * granularity_); if (high_target && reinterpret_cast<__int64>(target) - val > INT_MAX) { // We're further than 2GB from the target break; } else if (val <= 0) { // Less than 0 break; } pv = ::VirtualAlloc(reinterpret_cast<void*>(allocation_base - (i++ * granularity_)), pagesize_, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE); } // We couldn't allocate low, try to allocate high if (pv == NULL) { i = 1; // Round up to the next multiple of page granularity allocation_base = reinterpret_cast<unsigned char*>( (reinterpret_cast<__int64>(target) & (~(granularity_ - 1))) + granularity_); while (pv == NULL) { __int64 val = reinterpret_cast<__int64>(allocation_base) + (i * granularity_) - reinterpret_cast<__int64>(target); if (val > INT_MAX || val < 0) { // We're too far or we overflowed break; } pv = ::VirtualAlloc(reinterpret_cast<void*>(allocation_base + (i++ * granularity_)), pagesize_, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE); } } return pv; } bool PreamblePatcher::IsShortConditionalJump( unsigned char* target, unsigned int instruction_size) { return (*(target) & 0x70) == 0x70 && instruction_size == 2; } bool PreamblePatcher::IsShortJump( unsigned char* target, unsigned int instruction_size) { return target[0] == 0xeb && instruction_size == 2; } bool PreamblePatcher::IsNearConditionalJump( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xf && (*(target + 1) & 0x80) == 0x80 && instruction_size == 6; } bool PreamblePatcher::IsNearRelativeJump( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xe9 && instruction_size == 5; } bool 
PreamblePatcher::IsNearAbsoluteCall( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xff && (*(target + 1) & 0x10) == 0x10 && instruction_size == 6; } bool PreamblePatcher::IsNearRelativeCall( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xe8 && instruction_size == 5; } bool PreamblePatcher::IsMovWithDisplacement( unsigned char* target, unsigned int instruction_size) { // In this case, the ModRM byte's mod field will be 0 and r/m will be 101b (5) return instruction_size == 7 && *target == 0x48 && *(target + 1) == 0x8b && (*(target + 2) >> 6) == 0 && (*(target + 2) & 0x7) == 5; } SideStepError PreamblePatcher::PatchShortConditionalJump( unsigned char* source, unsigned int instruction_size, unsigned char* target, unsigned int* target_bytes, unsigned int target_size) { // note: rel8 offset is signed. Thus we need to ask for signed char // to negative offsets right unsigned char* original_jump_dest = (source + 2) + static_cast<signed char>(source[1]); unsigned char* stub_jump_from = target + 6; __int64 fixup_jump_offset = original_jump_dest - stub_jump_from; if (fixup_jump_offset > INT_MAX || fixup_jump_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up short jump because target" " is too far away."); return SIDESTEP_JUMP_INSTRUCTION; } *target_bytes = 6; if (target_size > *target_bytes) { // Convert the short jump to a near jump. // // 0f 8x xx xx xx xx = Jcc rel32off unsigned short jmpcode = ((0x80 | (source[0] & 0xf)) << 8) | 0x0f; memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(&jmpcode), 2); memcpy(reinterpret_cast<void*>(target + 2), reinterpret_cast<void*>(&fixup_jump_offset), 4); } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::PatchShortJump( unsigned char* source, unsigned int instruction_size,<|fim▁hole|> // note: rel8 offset is _signed_. Thus we need signed char here. unsigned char* original_jump_dest = (source + 2) + static_cast<signed char>(source[1]); unsigned char* stub_jump_from = target + 5; __int64 fixup_jump_offset = original_jump_dest - stub_jump_from; if (fixup_jump_offset > INT_MAX || fixup_jump_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up short jump because target" " is too far away."); return SIDESTEP_JUMP_INSTRUCTION; } *target_bytes = 5; if (target_size > *target_bytes) { // Convert the short jump to a near jump. // // e9 xx xx xx xx = jmp rel32off target[0] = 0xe9; memcpy(reinterpret_cast<void*>(target + 1), reinterpret_cast<void*>(&fixup_jump_offset), 4); } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::PatchNearJumpOrCall( unsigned char* source, unsigned int instruction_size, unsigned char* target, unsigned int* target_bytes, unsigned int target_size) { SIDESTEP_ASSERT(instruction_size == 5 || instruction_size == 6); unsigned int jmp_offset_in_instruction = instruction_size == 5 ? 
1 : 2; unsigned char* original_jump_dest = reinterpret_cast<unsigned char *>( reinterpret_cast<__int64>(source + instruction_size) + *(reinterpret_cast<int*>(source + jmp_offset_in_instruction))); unsigned char* stub_jump_from = target + instruction_size; __int64 fixup_jump_offset = original_jump_dest - stub_jump_from; if (fixup_jump_offset > INT_MAX || fixup_jump_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up near jump because target" " is too far away."); return SIDESTEP_JUMP_INSTRUCTION; } if ((fixup_jump_offset < SCHAR_MAX && fixup_jump_offset > SCHAR_MIN)) { *target_bytes = 2; if (target_size > *target_bytes) { // If the new offset is in range, use a short jump instead of a near jump. if (source[0] == ASM_JCC32REL_0 && (source[1] & ASM_JCC32REL_1_MASK) == ASM_JCC32REL_1_MASK) { unsigned short jmpcode = (static_cast<unsigned char>( fixup_jump_offset) << 8) | (0x70 | (source[1] & 0xf)); memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(&jmpcode), 2); } else { target[0] = ASM_JMP8REL; target[1] = static_cast<unsigned char>(fixup_jump_offset); } } } else { *target_bytes = instruction_size; if (target_size > *target_bytes) { memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(source), jmp_offset_in_instruction); memcpy(reinterpret_cast<void*>(target + jmp_offset_in_instruction), reinterpret_cast<void*>(&fixup_jump_offset), 4); } } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::PatchMovWithDisplacement( unsigned char* source, unsigned int instruction_size, unsigned char* target, unsigned int* target_bytes, unsigned int target_size) { SIDESTEP_ASSERT(instruction_size == 7); const int mov_offset_in_instruction = 3; // 0x48 0x8b 0x0d <offset> unsigned char* original_mov_dest = reinterpret_cast<unsigned char*>( reinterpret_cast<__int64>(source + instruction_size) + *(reinterpret_cast<int*>(source + mov_offset_in_instruction))); unsigned char* stub_mov_from = target + instruction_size; __int64 fixup_mov_offset = original_mov_dest - stub_mov_from; if (fixup_mov_offset > INT_MAX || fixup_mov_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up near MOV because target is too far away."); return SIDESTEP_UNEXPECTED; } *target_bytes = instruction_size; if (target_size > *target_bytes) { memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(source), mov_offset_in_instruction); memcpy(reinterpret_cast<void*>(target + mov_offset_in_instruction), reinterpret_cast<void*>(&fixup_mov_offset), 4); } return SIDESTEP_SUCCESS; } }; // namespace sidestep<|fim▁end|>
unsigned char* target, unsigned int* target_bytes, unsigned int target_size) {
<|file_name|>updateCategory.js<|end_file_name|><|fim▁begin|>let fs = require("fs"); let path = require("path"); let cp = require("child_process"); function runCommand(folder, args) { cp.spawn("npm", args, { env: process.env, cwd: folder, stdio: "inherit" }); } function getPackages(category) { let folder = path.join(__dirname, category); return fs .readdirSync(folder) .map(function(dir) { let fullPath = path.join(folder, dir); // check for a package.json file if (!fs.existsSync(path.join(fullPath, "package.json"))) { return; } return fullPath; }) .filter(function(pkg) { return pkg !== undefined; }); } function runCommandInCategory(category, args) { let pkgs = getPackages(category); pkgs.forEach(function(pkg) { runCommand(pkg, args); }); } let CATEGORIES = ["react", "vue", "svelte", "misc"]; let category = process.argv[2]; let args = process.argv.slice(3); if (category === "all") { CATEGORIES.forEach(function(c) {<|fim▁hole|>}<|fim▁end|>
runCommandInCategory(c, args); }); } else { runCommandInCategory(category, args);
<|file_name|>styles.js<|end_file_name|><|fim▁begin|>import {StyleSheet} from 'react-native' const styles = StyleSheet.create({ container: { flex: 1 } })<|fim▁hole|> export default styles<|fim▁end|>
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var roshamboApp = angular.module('roshamboApp', []), roshambo= [ { name:'Rock', src:'img/rock.png' }, { name:'Paper', src:'img/paper.png' }, { name:'Scissors', src:'img/scissors.png' } ], roshamboMap=roshambo.reduce(function(roshamboMap,thro){ roshamboMap[thro.name.toLowerCase()]=thro.src; return roshamboMap; },{}); roshamboApp.controller('RoshamboCtrl', function ($scope,$http) { $scope.roshambo=roshambo; $scope.selection=roshambo[0]; $scope.outcome=void 0; $scope.selectThrow=function(selected){ $scope.outcome=void 0; $scope.selection=selected; }; $scope.throwSelected=function(){ $http.post('http://localhost:8080/api/throw',{playerThrow:$scope.selection.name}) .then(function(successResponse){ $scope.outcome=successResponse.data; $scope.outcome.playerSrc=roshamboMap[$scope.outcome.playerThrow]; $scope.outcome.opponentSrc=roshamboMap[$scope.outcome.opponentThrow]; $scope.outcome.announce=function(){ if($scope.outcome.outcome==='draw'){ return 'It\'s a Draw!'; }else{ return $scope.outcome.outcome.charAt(0).toUpperCase()+$scope.outcome.outcome.slice(1)+' Wins!';<|fim▁hole|> alert('Error!'); console.log('Caught error posting throw:\n%s',JSON.stringify(errorResponse,null,2)); }); }; });<|fim▁end|>
} } },function(errorResponse){
<|file_name|>view.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding=utf-8 __author__ = 'pyphrb' from .import index from flask import render_template from model import NameFrom<|fim▁hole|> name = None form = NameFrom() if form.validate_on_submit(): name = form.name.data form.name.data = '' return render_template('index/index.html', form=form, name=name)<|fim▁end|>
@index.route('/', methods=['GET', 'POST']) def index():
<|file_name|>btree_builder.rs<|end_file_name|><|fim▁begin|>use anyhow::Result; use byteorder::{LittleEndian, WriteBytesExt}; use std::collections::VecDeque; use std::io::Cursor; use std::sync::{Arc, Mutex}; use crate::checksum; use crate::io_engine::*; use crate::pdata::btree::*; use crate::pdata::space_map::*; use crate::pdata::unpack::*; use crate::write_batcher::*; //------------------------------------------ /// A little ref counter abstraction. Used to manage counts for btree /// values (eg, the block/time in a thin mapping tree). pub trait RefCounter<Value> { fn get(&self, v: &Value) -> Result<u32>; fn inc(&mut self, v: &Value) -> Result<()>; fn dec(&mut self, v: &Value) -> Result<()>; } pub struct NoopRC {} impl<Value> RefCounter<Value> for NoopRC { fn get(&self, _v: &Value) -> Result<u32> { Ok(0) } fn inc(&mut self, _v: &Value) -> Result<()> { Ok(()) } fn dec(&mut self, _v: &Value) -> Result<()> { Ok(()) } } /// Wraps a space map up to become a RefCounter. pub struct SMRefCounter { sm: Arc<Mutex<dyn SpaceMap>>, } impl SMRefCounter { pub fn new(sm: Arc<Mutex<dyn SpaceMap>>) -> SMRefCounter { SMRefCounter { sm } } } impl RefCounter<u64> for SMRefCounter { fn get(&self, v: &u64) -> Result<u32> { self.sm.lock().unwrap().get(*v) } fn inc(&mut self, v: &u64) -> Result<()> { self.sm.lock().unwrap().inc(*v, 1) } fn dec(&mut self, v: &u64) -> Result<()> { self.sm.lock().unwrap().dec(*v)?; Ok(()) } } //------------------------------------------ // Building a btree for a given set of values is straight forward. // But often we want to merge shared subtrees into the btree we're // building, which _is_ complicated. Requiring rebalancing of nodes, // and careful copy-on-write operations so we don't disturb the shared // subtree. // // To avoid these problems this code never produces shared internal nodes. // With the large fan out of btrees this isn't really a problem; we'll // allocate more nodes than optimum, but not many compared to the number // of leaves. Also we can pack the leaves much better than the kernel // does due to out of order insertions. // // There are thus two stages to building a btree. // // i) Produce a list of populated leaves. These leaves may well be shared. // ii) Build the upper levels of the btree above the leaves. //------------------------------------------ /// Pack the given node ready to write to disk. pub fn pack_node<W: WriteBytesExt, V: Pack + Unpack>(node: &Node<V>, w: &mut W) -> Result<()> { match node { Node::Internal { header, keys, values, } => { header.pack(w)?; for k in keys { w.write_u64::<LittleEndian>(*k)?; } // pad with zeroes for _i in keys.len()..header.max_entries as usize { w.write_u64::<LittleEndian>(0)?; } for v in values { v.pack(w)?; } } Node::Leaf { header, keys, values, } => { header.pack(w)?; for k in keys { w.write_u64::<LittleEndian>(*k)?; } // pad with zeroes for _i in keys.len()..header.max_entries as usize { w.write_u64::<LittleEndian>(0)?; } for v in values { v.pack(w)?; } } } Ok(()) } //------------------------------------------ pub fn calc_max_entries<V: Unpack>() -> usize { let elt_size = 8 + V::disk_size() as usize; let total = ((BLOCK_SIZE - NodeHeader::disk_size() as usize) / elt_size) as usize; total / 3 * 3 } pub struct WriteResult { first_key: u64, loc: u64, } /// Write a node to a free metadata block. 
fn write_node_<V: Unpack + Pack>(w: &mut WriteBatcher, mut node: Node<V>) -> Result<WriteResult> { let keys = node.get_keys(); let first_key = *keys.first().unwrap_or(&0u64); let b = w.alloc()?; node.set_block(b.loc); let mut cursor = Cursor::new(b.get_data()); pack_node(&node, &mut cursor)?; let loc = b.loc; w.write(b, checksum::BT::NODE)?; Ok(WriteResult { first_key, loc }) } /// A node writer takes a Vec of values and packs them into /// a btree node. It's up to the specific implementation to /// decide if it produces internal or leaf nodes. pub trait NodeIO<V: Unpack + Pack> { fn write(&self, w: &mut WriteBatcher, keys: Vec<u64>, values: Vec<V>) -> Result<WriteResult>; fn read(&self, w: &mut WriteBatcher, block: u64) -> Result<(Vec<u64>, Vec<V>)>; } pub struct LeafIO {} impl<V: Unpack + Pack> NodeIO<V> for LeafIO { fn write(&self, w: &mut WriteBatcher, keys: Vec<u64>, values: Vec<V>) -> Result<WriteResult> { let header = NodeHeader { block: 0, is_leaf: true, nr_entries: keys.len() as u32, max_entries: calc_max_entries::<V>() as u32, value_size: V::disk_size(),<|fim▁hole|> let node = Node::Leaf { header, keys, values, }; write_node_(w, node) } fn read(&self, w: &mut WriteBatcher, block: u64) -> Result<(Vec<u64>, Vec<V>)> { let b = w.read(block)?; let path = Vec::new(); match unpack_node::<V>(&path, b.get_data(), true, true)? { Node::Internal { .. } => { panic!("unexpected internal node"); } Node::Leaf { keys, values, .. } => Ok((keys, values)), } } } struct InternalIO {} impl NodeIO<u64> for InternalIO { fn write(&self, w: &mut WriteBatcher, keys: Vec<u64>, values: Vec<u64>) -> Result<WriteResult> { let header = NodeHeader { block: 0, is_leaf: false, nr_entries: keys.len() as u32, max_entries: calc_max_entries::<u64>() as u32, value_size: u64::disk_size(), }; let node: Node<u64> = Node::Internal { header, keys, values, }; write_node_(w, node) } fn read(&self, w: &mut WriteBatcher, block: u64) -> Result<(Vec<u64>, Vec<u64>)> { let b = w.read(block)?; let path = Vec::new(); match unpack_node::<u64>(&path, b.get_data(), true, true)? { Node::Internal { keys, values, .. } => Ok((keys, values)), Node::Leaf { .. } => { panic!("unexpected leaf node"); } } } } //------------------------------------------ /// This takes a sequence of values or nodes, and builds a vector of leaf nodes. /// Care is taken to make sure that all nodes are at least half full unless there's /// only a single node. pub struct NodeBuilder<V: Pack + Unpack> { nio: Box<dyn NodeIO<V>>, value_rc: Box<dyn RefCounter<V>>, max_entries_per_node: usize, values: VecDeque<(u64, V)>, nodes: Vec<NodeSummary>, shared: bool, } /// When the builder is including pre-built nodes it has to decide whether /// to use the node as given, or read it and import the values directly /// for balancing reasons. This struct is used to stop us re-reading /// the NodeHeaders of nodes that are shared multiple times. #[derive(Clone)] pub struct NodeSummary { block: u64, key: u64, nr_entries: usize, /// This node was passed in pre-built. Important for deciding if /// we need to adjust the ref counts if we unpack. shared: bool, } impl<'a, V: Pack + Unpack + Clone> NodeBuilder<V> { /// Create a new NodeBuilder pub fn new(nio: Box<dyn NodeIO<V>>, value_rc: Box<dyn RefCounter<V>>, shared: bool) -> Self { NodeBuilder { nio, value_rc, max_entries_per_node: calc_max_entries::<V>(), values: VecDeque::new(), nodes: Vec::new(), shared, } } /// Push a single value. This may emit a new node, hence the Result /// return type. The value's ref count will be incremented. 
pub fn push_value(&mut self, w: &mut WriteBatcher, key: u64, val: V) -> Result<()> { // Unshift the previously pushed node since it is not the root let half_full = self.max_entries_per_node / 2; if self.nodes.len() == 1 && (self.nodes.last().unwrap().nr_entries < half_full) { self.unshift_node(w)?; } // Have we got enough values to emit a node? We try and keep // at least max_entries_per_node entries unflushed so we // can ensure the final node is balanced properly. else if self.values.len() == self.max_entries_per_node * 2 { self.emit_node(w)?; } self.value_rc.inc(&val)?; self.values.push_back((key, val)); Ok(()) } // To avoid writing an under populated node we have to grab some // values from the first of the shared nodes. fn append_values(&mut self, w: &mut WriteBatcher, node: &NodeSummary) -> Result<()> { let (keys, values) = self.read_node(w, node.block)?; for i in 0..keys.len() { self.value_rc.inc(&values[i])?; self.values.push_back((keys[i], values[i].clone())); } Ok(()) } /// Push a number of prebuilt, shared nodes. The builder may decide to not /// use a shared node, instead reading the values and packing them /// directly. This may do IO to emit nodes, so returns a Result. /// Any shared nodes that are used have their block incremented in /// the space map. Will only increment the ref count for values /// contained in the nodes if it unpacks them. pub fn push_nodes(&mut self, w: &mut WriteBatcher, nodes: &[NodeSummary]) -> Result<()> { assert!(!nodes.is_empty()); // Assume that the node is a shared root if it is the first comer. // A rooted leaf could have any number of entries. let maybe_root = (nodes.len() == 1) && self.nodes.is_empty() && self.values.is_empty(); if maybe_root { let n = &nodes[0]; w.sm.lock().unwrap().inc(n.block, 1)?; self.nodes.push(n.clone()); return Ok(()); } // As a sanity check we make sure that all the shared nodes contain the // minimum nr of entries. // A single shared node could be possibly under populated (less than half-full) // due to btree removal, or even underfull (<33% residency) due to kernel issues. // Those kinds of nodes will be merged into their siblings. let half_full = self.max_entries_per_node / 2; if nodes.len() > 1 { for n in nodes { if n.nr_entries < half_full { panic!("under populated node"); } } } // Unshift the previously pushed node since it is not the root if self.nodes.len() == 1 && (self.nodes.last().unwrap().nr_entries < half_full) { self.unshift_node(w)?; } // Decide if we're going to use the pre-built nodes. if !self.values.is_empty() && (self.values.len() < half_full) { let mut nodes_iter = nodes.iter(); let n = nodes_iter.next(); self.append_values(w, n.unwrap())?; // Do not flush if there's no succeeding nodes, // so that it could produce a more compact metadata. if nodes.len() > 1 { // Flush all the values. self.emit_all(w)?; // Add the remaining nodes. for n in nodes_iter { w.sm.lock().unwrap().inc(n.block, 1)?; self.nodes.push(n.clone()); } } } else { // Flush all the values. self.emit_all(w)?; if nodes[0].nr_entries < half_full { // An under populated nodes[0] implies nodes.len() == 1, // and that has to be merged into their siblings. self.append_values(w, &nodes[0])?; } else { // Add the nodes. for n in nodes { w.sm.lock().unwrap().inc(n.block, 1)?; self.nodes.push(n.clone()); } } } Ok(()) } /// Signal that no more values or nodes will be pushed. Returns a /// vector of the built nodes. Consumes the builder. 
pub fn complete(mut self, w: &mut WriteBatcher) -> Result<Vec<NodeSummary>> { let half_full = self.max_entries_per_node / 2; if !self.values.is_empty() && (self.values.len() < half_full) && !self.nodes.is_empty() { // We don't have enough values to emit a node. So we're going to // have to rebalance with the previous node. self.unshift_node(w)?; } self.emit_all(w)?; if self.nodes.is_empty() { self.emit_empty_leaf(w)? } Ok(self.nodes) } //------------------------- // We're only interested in the keys and values from the node, and // not whether it's a leaf or internal node. fn read_node(&self, w: &mut WriteBatcher, block: u64) -> Result<(Vec<u64>, Vec<V>)> { self.nio.read(w, block) } /// Writes a node with the first 'nr_entries' values. fn emit_values(&mut self, w: &mut WriteBatcher, nr_entries: usize) -> Result<()> { assert!(nr_entries <= self.values.len()); // Write the node let mut keys = Vec::new(); let mut values = Vec::new(); for _i in 0..nr_entries { let (k, v) = self.values.pop_front().unwrap(); keys.push(k); values.push(v); } let wresult = self.nio.write(w, keys, values)?; // Push a summary to the 'nodes' vector. self.nodes.push(NodeSummary { block: wresult.loc, key: wresult.first_key, nr_entries, shared: self.shared, }); Ok(()) } /// Writes a full node. fn emit_node(&mut self, w: &mut WriteBatcher) -> Result<()> { self.emit_values(w, self.max_entries_per_node) } /// Emits all remaining values. Panics if there are more than 2 * /// max_entries_per_node values. fn emit_all(&mut self, w: &mut WriteBatcher) -> Result<()> { match self.values.len() { 0 => { // There's nothing to emit Ok(()) } n if n <= self.max_entries_per_node => { // Emit a single node. self.emit_values(w, n) } n if n <= self.max_entries_per_node * 2 => { // Emit two nodes. let n1 = n / 2; let n2 = n - n1; self.emit_values(w, n1)?; self.emit_values(w, n2) } _ => { panic!("self.values shouldn't have more than 2 * max_entries_per_node entries"); } } } fn emit_empty_leaf(&mut self, w: &mut WriteBatcher) -> Result<()> { self.emit_values(w, 0) } /// Pops the last node, and prepends it's values to 'self.values'. Used /// to rebalance when we have insufficient values for a final node. The /// node is decremented in the space map. fn unshift_node(&mut self, w: &mut WriteBatcher) -> Result<()> { let ls = self.nodes.pop().unwrap(); let (keys, values) = self.read_node(w, ls.block)?; w.sm.lock().unwrap().dec(ls.block)?; let mut vals = VecDeque::new(); for i in 0..keys.len() { // We only need to inc the values if the node was pre built. 
if ls.shared { self.value_rc.inc(&values[i])?; } vals.push_back((keys[i], values[i].clone())); } vals.append(&mut self.values); std::mem::swap(&mut self.values, &mut vals); Ok(()) } } //------------------------------------------ pub struct BTreeBuilder<V: Unpack + Pack> { leaf_builder: NodeBuilder<V>, } impl<V: Unpack + Pack + Clone> BTreeBuilder<V> { pub fn new(value_rc: Box<dyn RefCounter<V>>) -> BTreeBuilder<V> { BTreeBuilder { leaf_builder: NodeBuilder::new(Box::new(LeafIO {}), value_rc, false), } } pub fn push_value(&mut self, w: &mut WriteBatcher, k: u64, v: V) -> Result<()> { self.leaf_builder.push_value(w, k, v) } pub fn push_leaves(&mut self, w: &mut WriteBatcher, leaves: &[NodeSummary]) -> Result<()> { self.leaf_builder.push_nodes(w, leaves) } pub fn complete(self, w: &mut WriteBatcher) -> Result<u64> { let nodes = self.leaf_builder.complete(w)?; build_btree(w, nodes) } } //------------------------------------------ // Build a btree from a list of pre-built leaves pub fn build_btree(w: &mut WriteBatcher, leaves: Vec<NodeSummary>) -> Result<u64> { // Now we iterate, adding layers of internal nodes until we end // up with a single root. let mut nodes = leaves; while nodes.len() > 1 { let mut builder = NodeBuilder::new(Box::new(InternalIO {}), Box::new(NoopRC {}), false); for n in nodes { builder.push_value(w, n.key, n.block)?; } nodes = builder.complete(w)?; } assert!(nodes.len() == 1); let root = nodes[0].block; Ok(root) } //------------------------------------------ // The pre-built nodes and the contained values were initialized with // a ref count 1, which is analogous to a "tempoaray snapshot" of // potentially shared leaves. We have to drop those temporary references // to pre-built nodes at the end of device building, and also decrease // ref counts of the contained values if a pre-built leaf is no longer // referenced. pub fn release_leaves<V: Pack + Unpack>( w: &mut WriteBatcher, leaves: &[NodeSummary], value_rc: &mut dyn RefCounter<V>, ) -> Result<()> { let nio = LeafIO {}; for n in leaves { let deleted = w.sm.lock().unwrap().dec(n.block)?; if deleted { let (_, values) = nio.read(w, n.block)?; for v in values { value_rc.dec(&v)?; } } } Ok(()) } //------------------------------------------<|fim▁end|>
};
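The btree_builder.rs row above explains, in its comments and in emit_all(), how NodeBuilder keeps leaves at least half full: values are buffered until there are 2 * max_entries_per_node of them, and the flush then writes either one node or two roughly equal halves. A minimal standalone sketch of that splitting rule (written in Python for illustration rather than the crate's Rust; the max_entries numbers in the example are made up):

# Sketch of the emit_all() balancing rule from the NodeBuilder above.
def emit_counts(n, max_entries):
    """Return the node sizes emit_all() would write for n buffered values."""
    assert 0 <= n <= 2 * max_entries, "the builder never buffers more than 2 * max"
    if n == 0:
        return []                    # nothing to emit
    if n <= max_entries:
        return [n]                   # a single node suffices
    return [n // 2, n - n // 2]      # two nodes, each at least half full

# Example: emit_counts(200, 126) == [100, 100]; emit_counts(127, 126) == [63, 64]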
<|file_name|>McElieceKobaraImaiCipherTest.java<|end_file_name|><|fim▁begin|>package org.spongycastle.pqc.jcajce.provider.test;

import java.security.KeyPairGenerator;
import java.security.spec.AlgorithmParameterSpec;

import javax.crypto.Cipher;

import org.spongycastle.pqc.jcajce.spec.ECCKeyGenParameterSpec;

<|fim▁hole|>
    extends AsymmetricHybridCipherTest
{
    protected void setUp()
    {
        super.setUp();
        try
        {
            kpg = KeyPairGenerator.getInstance("McElieceKobaraImai");
            cipher = Cipher.getInstance("McElieceKobaraImaiWithSHA256");
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }

    /**
     * Test encryption and decryption performance for SHA256 message digest and parameters
     * m=11, t=50.
     */
    public void testEnDecryption_SHA256_11_50()
        throws Exception
    {
        // initialize key pair generator
        AlgorithmParameterSpec kpgParams = new ECCKeyGenParameterSpec(11, 50);
        kpg.initialize(kpgParams);
        performEnDecryptionTest(1, 10, 32, null);
    }
}<|fim▁end|>
public class McElieceKobaraImaiCipherTest
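The test above fixes the McEliece parameters to m = 11 and t = 50 via ECCKeyGenParameterSpec. Reading m as the extension degree of GF(2^m) and t as the number of correctable errors (treat that mapping as an assumption here), the resulting code sizes can be sanity-checked with standard binary Goppa code arithmetic; this is plain arithmetic, not a SpongyCastle call:

# Rough parameter check for ECCKeyGenParameterSpec(11, 50), assuming the usual
# McEliece/Goppa reading of (m, t). Not part of the library or of the test.
m, t = 11, 50
n = 2 ** m           # code length: 2048
k_min = n - m * t    # standard lower bound on the code dimension: 1498
print(f"n = {n}, t = {t}, k >= {k_min}")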
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2011-2015 Slack # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from django.test import TestCase as TestCaseBase from .scraper import beatport, discogs, itunes, junodownload, metalarchives, musicbrainz, bandcamp, musiksammler from .result import ReleaseResult, ListResult, NotFoundResult, Result import unittest def todict(obj): if hasattr(obj, "__iter__"): return [todict(v) for v in obj] elif hasattr(obj, "__dict__"): return dict([(key, todict(value)) for key, value in obj.__dict__.iteritems() if not callable(value) and not key.startswith('_')]) else: return obj class TestCase(TestCaseBase): maxDiff = None def setUp(self): self.addTypeEqualityFunc(ReleaseResult, 'assertResultEqual') self.addTypeEqualityFunc(ListResult, 'assertResultEqual') self.addTypeEqualityFunc(NotFoundResult, 'assertResultEqual') def assertResultEqual(self, d1, d2, msg=None): self.assertTrue(issubclass(d1.__class__, Result), 'First argument is not a Result') self.assertTrue(issubclass(d2.__class__, Result), 'Second argument is not a Result') self.assertEqual(d1.__class__.__name__, d2.__class__.__name__) self.assertEqual(todict(d1), todict(d2), msg) class DiscogsTest(TestCase): def test_simple_album(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'03 Nov 2000') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'CD, Album') label_id = expected.create_label_id() label_id.set_label(u'Richterskala') label_id.append_catalogue_nr(u'TRI 070 CD') expected.append_label_id(label_id) expected.set_title(u'Hast Du Mich Vermisst?') artist = expected.create_artist() artist.set_name(u'ASP') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_genre(u'Rock') expected.append_style(u'Goth Rock') expected.append_style(u'Synth-pop') expected.set_url(u'http://www.discogs.com/ASP-Hast-Du-Mich-Vermisst/release/453432') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Schwarzer Schmetterling') track.set_length(290) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Where Do The Gods Go') track.set_length(226) disc.append_track(track) track = disc.create_track() 
track.set_number(u'3') track.set_title(u'Dancing') track.set_length(345) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'K\xfcss Mich') track.set_length(311) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Sing Child') track.set_length(239) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Teach Me War') track.set_length(225) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Imbecile Anthem') track.set_length(222) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Und Wir Tanzten (Ungeschickte Liebesbriefe)') track.set_length(305) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Blinded') track.set_length(443) disc.append_track(track) expected.append_disc(disc) scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/ASP-Hast-Du-Mich-Vermisst/release/453432') result = scraper.get_result() self.assertEqual(expected, result) def test_multiple_cds(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'25 May 2007') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'4 \xd7 CD, Compilation, Limited Edition, Digipak, Box Set, Limited Edition, Hand-Numbered') label_id = expected.create_label_id() label_id.set_label(u'[Trisol] Music Group GmbH') label_id.append_catalogue_nr(u'TRI 303 CD') expected.append_label_id(label_id) expected.set_title(u"The 'Once In A Lifetime' Recollection Box") artist = expected.create_artist() artist.set_name(u'ASP') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name(u'Chamber') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Classical') expected.append_genre(u'Non-Music') expected.append_genre(u'Rock') expected.append_style(u'Acoustic') expected.append_style(u'Goth Rock') expected.append_style(u'Classical') expected.append_style(u'Speech') expected.set_url(u'http://www.discogs.com/ASP-Chamber-The-Once-In-A-Lifetime-Recollection-Box/release/977684') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Once In A Lifetime, Part 1') track.set_length(351) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u"A Dead Man's Song") track.set_length(312) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Versuchung') track.set_length(345) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Torn') track.set_length(304) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Demon Love') track.set_length(272) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'The Paperhearted Ghost') track.set_length(283) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'A Tale Of Real Love') track.set_length(316) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Hunger') track.set_length(289) disc.append_track(track) 
track = disc.create_track() track.set_number(u'9') track.set_title(u'The Truth About Snow-White') track.set_length(240) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'She Wore Shadows') track.set_length(275) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Und Wir Tanzten (Ungeschickte Liebesbriefe)') track.set_length(317) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Once In A Lifetime, Part 2 (Reprise)') track.set_length(164) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'K\xfcss Mich') track.set_length(384) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Silence - Release') track.set_length(225) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Solitude') track.set_length(220) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Die Ballade Von Der Erweckung') track.set_length(527) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Another Conversation') track.set_length(201) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Sing Child') track.set_length(449) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Ich Will Brennen') track.set_length(300) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Toscana') track.set_length(374) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Ride On') track.set_length(222) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Hometown') track.set_length(181) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Werben') track.set_length(293) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Once In A Lifetime, Part 3 (Finale)') track.set_length(608) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(3) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'H\xe4sslich') track.set_length(145) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Backstage (All Areas)') track.set_length(573) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Paracetamoltr\xe4ume') track.set_length(517) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Ausszug Aus "Tremendista" Feat. 
Ralph M\xfcller/Gitarre') track.set_length(1473) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Campari O') track.set_length(159) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(4) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Asp, Soundcheck-Outtake: "Sicamore Trees"') track.set_length(94) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Demon Love') track.set_length(275) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'The Truth About Snow-White') track.set_length(274) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'She Wore Shadows') track.set_length(319) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Sing Child') track.set_length(469) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Hometown') track.set_length(221) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Hunger') track.set_length(274) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Silence-Release') track.set_length(208) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Asp, Soundcheck-Outtake: "She Moved Through The Fair"') track.set_length(120) disc.append_track(track) expected.append_disc(disc) scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/ASP-Chamber-The-Once-In-A-Lifetime-Recollection-Box/release/977684') result = scraper.get_result() self.assertEqual(expected, result) def test_featuring_track_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'01 Apr 2011') release_event.set_country(u'Europe') expected.append_release_event(release_event) expected.set_format(u'CD, Album') label_id = expected.create_label_id() label_id.set_label(u'Rootdown Records') label_id.append_catalogue_nr(u'RDM13074-2') expected.append_label_id(label_id) expected.set_title(u'Unter Freunden') artist = expected.create_artist() artist.set_name(u'Mono & Nikitaman') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Reggae') expected.append_style(u'Dancehall') expected.set_url(u'http://www.discogs.com/Mono-Nikitaman-Unter-Freunden/release/3432154') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Intro') track.set_length(13) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Unter Freunden') track.set_length(184) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Karma') track.set_length(189) track_artist = expected.create_artist() track_artist.set_name(u"Ce'cile") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Zeit Steht Still') track.set_length(260) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Komplizen') track.set_length(185) disc.append_track(track) track = 
disc.create_track() track.set_number(u'6') track.set_title(u'Wenn Sich Der Nebel Verzieht') track.set_length(197) track_artist = expected.create_artist() track_artist.set_name(u'Gentleman') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Schwerelos') track.set_length(227) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Ein Paar Meter') track.set_length(198) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Cash') track.set_length(188) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Dezibel') track.set_length(270) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Kontrast') track.set_length(214) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'R\xfcckkehr Der Clowns') track.set_length(198) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Superstar') track.set_length(227) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Underground') track.set_length(204) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Showdown') track.set_length(261) track_artist = expected.create_artist() track_artist.set_name(u'Rebellion') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Mono-Nikitaman-Unter-Freunden/release/3432154') result = scraper.get_result() self.assertEqual(expected, result) def test_remix_track_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'04 Jul 2005') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'CD, Album, Limited Edition, Digibook, CD, Compilation, Limited Edition') label_id = expected.create_label_id() label_id.set_label(u'Trisol') label_id.append_catalogue_nr(u'TRI 231 CD') expected.append_label_id(label_id) expected.set_title(u'Aus Der Tiefe') artist = expected.create_artist() artist.set_name(u'ASP') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_genre(u'Rock') expected.append_style(u'Alternative Rock') expected.set_url(u'http://www.discogs.com/ASP-Aus-Der-Tiefe-Der-Schwarze-Schmetterling-IV/release/710517') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Beschw\xf6rung') track.set_length(391) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Willkommen Zur\xfcck') track.set_length(137) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Schwarzes Blut') track.set_length(212) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Im Dunklen Turm') track.set_length(101) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Me') track.set_length(278) 
disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Schattenschreie') track.set_length(21) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Hunger') track.set_length(321) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Fremde Erinnerungen') track.set_length(72) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Ballade Von Der Erweckung') track.set_length(533) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Tiefenrausch') track.set_length(245) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Schmetterling, Du Kleines Ding') track.set_length(42) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Ich Komm Dich Holn') track.set_length(257) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Werben') track.set_length(268) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Aus Der Tiefe') track.set_length(198) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Spiegelaugen') track.set_length(204) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Tiefenrausch (Reprise)') track.set_length(67) disc.append_track(track) track = disc.create_track() track.set_number(u'17') track.set_title(u'Panik') track.set_length(252) disc.append_track(track) track = disc.create_track() track.set_number(u'18') track.set_title(u'Spiegel') track.set_length(331) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Schwarzes Blut (Haltung Version)') track.set_length(249) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Werben (Subtil Edit)') track.set_length(257) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Me (Single Version)') track.set_length(225) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Tiefenrausch (Feat. 
Sara Noxx)') track.set_length(245) track_artist = expected.create_artist() track_artist.set_name(u'Sara Noxx') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Hunger (Single Mix)') track.set_length(259) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Panik (Ganz Rauf-Verison)') track.set_length(273) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Beschw\xf6rung (Siegeszug Instrumental)') track.set_length(205) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Buch Des Vergessens (Unreines Spiegelsonett)') track.set_length(115) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Kokon (Brandneu-Remix Von Umbra Et Imago)') track.set_length(279) track_artist = expected.create_artist() track_artist.set_name(u'Umbra Et Imago') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Me (Me And You Remix Von Blutengel)') track.set_length(344) track_artist = expected.create_artist() track_artist.set_name(u'Blutengel') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Und Wir Tanzten (Ungeschickte Liebesbriefe) (Live)') track.set_length(347) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Ich Will Brennen (Live)') track.set_length(369) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Starfucker: In Der Folterkammer') track.set_length(127) disc.append_track(track) expected.append_disc(disc) scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/ASP-Aus-Der-Tiefe-Der-Schwarze-Schmetterling-IV/release/710517') result = scraper.get_result() self.assertEqual(expected, result) def test_vinyl(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2008') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'2 \xd7 Vinyl, LP') label_id = expected.create_label_id() label_id.set_label(u'Rootdown Records') label_id.append_catalogue_nr(u'RDM 13051-1') expected.append_label_id(label_id) expected.set_title(u'Ausser Kontrolle') artist = expected.create_artist() artist.set_name(u'Mono & Nikitaman') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Reggae') expected.append_style(u'Dancehall') expected.append_style(u'Reggae-Pop') expected.set_url(u'http://www.discogs.com/Mono-Nikitaman-Ausser-Kontrolle/release/1540929') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'A1') track.set_title(u'Intro') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'A2') track.set_title(u'Schlag Alarm') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'A3') track.set_title(u'Kann Ja Mal Passieren') 
track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'A4') track.set_title(u'Ausser Kontrolle') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'A5') track.set_title("Hol's Dir") track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'B1') track.set_title(u'Das Alles') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'B2') track.set_title(u'Digge Digge') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'B3') track.set_title(u'Nur So') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'B4') track.set_title(u'Yeah') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'C1') track.set_title(u'Von Osten Bis Westen') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Russkaja') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'C2') track.set_title(u'Wenn Ihr Schlaft') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'C3') track.set_title(u'Unterwegs') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'C4') track.set_title(u'Tiktak') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'D1') track.set_title(u'Tut Mir Leid') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Nosliw') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'D2') track.set_title(u'Es Kommt Anders') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'D3') track.set_title(u'Das Alles (Zion Train Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Zion Train') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Mono-Nikitaman-Ausser-Kontrolle/release/1540929') r = s.get_result() self.assertEqual(expected, r) def test_featuring_main_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'08 Feb 2011') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'3 \xd7 File, MP3, 320 kbps') label_id = expected.create_label_id() label_id.set_label(u'Redux Recordings') label_id.append_catalogue_nr(u'RDX062') expected.append_label_id(label_id) expected.set_title(u'In My Dreams') artist = expected.create_artist() artist.set_name(u'Lifted Emotion') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name(u'Anastasiia Purple') artist.set_various(False) artist.append_type(expected.ArtistTypes.FEATURING) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_style(u'Trance') 
expected.set_url(u'http://www.discogs.com/Lifted-Emotion-feat-Anastasiia-Purple-In-My-Dreams/release/2806179') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'In My Dreams (Original Vocal Mix)') track.set_length(558) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'In My Dreams (Original Dub Mix)') track.set_length(558) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'In My Dreams (Ost & Meyer Extraodinary Mix)') track.set_length(472) track_artist = expected.create_artist() track_artist.set_name(u'Ost & Meyer') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Lifted-Emotion-feat-Anastasiia-Purple-In-My-Dreams/release/2806179') r = s.get_result() self.assertEqual(expected, r) def test_various_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2010') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'CD, Compilation') label_id = expected.create_label_id() label_id.set_label(u'Batbeliever Releases') label_id.append_catalogue_nr(u'BAT 075') expected.append_label_id(label_id) expected.set_title(u'Gothic File 14') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_genre(u'Rock') expected.append_style(u'EBM') expected.append_style(u'Darkwave') expected.append_style(u'Industrial') expected.append_style(u'Goth Rock') expected.append_style(u'Electro') expected.set_url(u'http://www.discogs.com/Various-Gothic-File-14/release/3700493') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Echo In Me') track.set_length(236) track_artist = expected.create_artist() track_artist.set_name(u'Diary Of Dreams') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Liar (Version)') track.set_length(219) track_artist = expected.create_artist() track_artist.set_name(u'Gothminister') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'The End Of It All (Edit)') track.set_length(237) track_artist = expected.create_artist() track_artist.set_name(u'Sirenia') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Sanctuary') track.set_length(239) track_artist = expected.create_artist() track_artist.set_name(u'Merciful Nuns') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Worlds Collide (Demo Version)') track.set_length(261) 
track_artist = expected.create_artist() track_artist.set_name(u'Covenant') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Drowning World') track.set_length(253) track_artist = expected.create_artist() track_artist.set_name(u'Ien Oblique') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'In The Name Of God') track.set_length(297) track_artist = expected.create_artist() track_artist.set_name(u'Betamorphose') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'PsychoCop (Folge 8)') track.set_length(171) track_artist = expected.create_artist() track_artist.set_name(u'Don Harris') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Gothic-File-14/release/3700493') r = s.get_result() self.assertEqual(expected, r) def test_label_with_suffix(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'25 Nov 2005') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'CD, Album') label_id = expected.create_label_id() label_id.set_label(u'Indigo') label_id.append_catalogue_nr(u'CD 55182') expected.append_label_id(label_id) expected.set_title(u'Prima Nocte') artist = expected.create_artist() artist.set_name(u'Feuerschwanz') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Folk') expected.append_genre(u'World') expected.append_genre(u'Country') expected.append_genre(u'Rock') expected.append_style(u'Medieval') expected.set_url(u'http://www.discogs.com/Feuerschwanz-Prima-Nocte/release/2611694') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Es War Einmal') track.set_length(172) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Das Mittelalter') track.set_length(260) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Drachentanz') track.set_length(224) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Das Turnier') track.set_length(254) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Prima Nocte') track.set_length(331) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'B\xe4rentanz') track.set_length(232) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Herren Der Winde') track.set_length(265) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Der Teufel') track.set_length(290) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Schneewittchen') track.set_length(377) disc.append_track(track) 
track = disc.create_track() track.set_number(u'10') track.set_title(u'Der Traum') track.set_length(319) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'R\xe4uber') track.set_length(206) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Sauflied') track.set_length(234) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Teufelsgeschenk') track.set_length(264) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'La\xdft Die Ritter Schlafen') track.set_length(313) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Gute Nacht') track.set_length(420) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Feuerschwanz-Prima-Nocte/release/2611694') r = s.get_result() self.assertEqual(expected, r) def test_album_with_unicode_dash_in_title(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'25 Jun 2012') release_event.set_country(u'UK') expected.append_release_event(release_event) expected.set_format(u'CD, Album') label_id = expected.create_label_id() label_id.set_label(u'Ash International') label_id.append_catalogue_nr(u'Ash 9.5') expected.append_label_id(label_id) expected.set_title(u'AUN \u2013 The Beginning And The End Of All Things') artist = expected.create_artist() artist.set_name(u'Christian Fennesz') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_genre(u'Stage & Screen') expected.append_style(u'Abstract') expected.append_style(u'Ambient') expected.append_style(u'Modern Classical') expected.append_style(u'Soundtrack') expected.set_url(u'http://www.discogs.com/Christian-Fennesz-AUN-The-Beginning-And-The-End-Of-All-Things/release/2881000') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Kae') track.set_length(131) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Aware') track.set_length(288) track_artist = expected.create_artist() track_artist.set_name(u'Fennesz Sakamoto') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Haru') track.set_length(282) track_artist = expected.create_artist() track_artist.set_name(u'Fennesz Sakamoto') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Sekai') track.set_length(134) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Euclides') track.set_length(184) track_artist = 
expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Sasazuka') track.set_length(231) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Trace') track.set_length(349) track_artist = expected.create_artist() track_artist.set_name(u'Fennesz Sakamoto') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Mori') track.set_length(75) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'AUN40') track.set_length(306) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Namuru') track.set_length(170) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Himitsu') track.set_length(166) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'AUN80') track.set_length(217) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Nympha') track.set_length(150) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Shinu') track.set_length(215) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Hikari') track.set_length(256) track_artist = expected.create_artist() track_artist.set_name(u'Christian Fennesz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = 
discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Christian-Fennesz-AUN-The-Beginning-And-The-End-Of-All-Things/release/2881000') r = s.get_result() self.assertEqual(expected, r) def test_master_release(self): expected = ListResult() expected.set_scraper_name(None) item = expected.create_item() item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burning') item.set_info(u'10", Pic, Ltd | Love Peace And Unity Recordings | LOVE-03 | UK | 1997') item.set_query(u'http://www.discogs.com/release/146468') item.set_url(u'http://www.discogs.com/release/146468') expected.append_item(item) item = expected.create_item() item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burning') item.set_info(u'10", Pic, Ltd, S/Sided | Love Peace And Unity Recordings | LOVE-03 | UK | 1997') item.set_query(u'http://www.discogs.com/release/1503116') item.set_url(u'http://www.discogs.com/release/1503116') expected.append_item(item) item = expected.create_item() item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burning') item.set_info(u'12", Ltd, Pic | Love Peace And Unity Recordings | LOVE-06 | UK | 2006-04-18') item.set_query(u'http://www.discogs.com/release/670448') item.set_url(u'http://www.discogs.com/release/670448') expected.append_item(item) item = expected.create_item() item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burnin') item.set_info(u'12", Promo | Classic Confetti | CC 02 | UK | 2001') item.set_query(u'http://www.discogs.com/release/2093234') item.set_url(u'http://www.discogs.com/release/2093234') expected.append_item(item) item = expected.create_item() item.set_name(u"Mystic Matt & Anthill Mob \u2013 'Burnin (Let The Music)'") item.set_info(u'12" | Classic Confetti | CC 02 (2) | UK | 2001') item.set_query(u'http://www.discogs.com/release/284437') item.set_url(u'http://www.discogs.com/release/284437') expected.append_item(item) item = expected.create_item() item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burnin') item.set_info(u'12" | Classic Confetti | CC 02 | UK | 2001') item.set_query(u'http://www.discogs.com/release/149302') item.set_url(u'http://www.discogs.com/release/149302') expected.append_item(item) item = expected.create_item() item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burnin') item.set_info(u'12" | Classic Confetti | CC02(3F/02) | UK & Europe | 2002') item.set_query(u'http://www.discogs.com/release/739159') item.set_url(u'http://www.discogs.com/release/739159') expected.append_item(item) s = discogs.MasterScraper.from_string('http://www.discogs.com/Mystic-Matt-Anthill-Mob-Burning/master/181860') r = s.get_result() self.assertEqual(expected, r) def test_album_with_face_in_track_numbers(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'1984') release_event.set_country(u'Sweden') expected.append_release_event(release_event) expected.set_format(u'Vinyl, 7"') label_id = expected.create_label_id() label_id.set_label(u'Mamma') label_id.append_catalogue_nr(u'MA-501') expected.append_label_id(label_id) expected.set_title(u'Another Story') artist = expected.create_artist() artist.set_name(u'General Belgrano') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Rock') expected.append_style(u'New Wave') expected.set_url(u'http://www.discogs.com/General-Belgrano-Another-Story/release/2213179') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'Face I') 
track.set_title(u'Another Story') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'Face II') track.set_title("War Isn't Gold") track.set_length(None) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/General-Belgrano-Another-Story/release/2213179') r = s.get_result() self.assertEqual(expected, r) def test_album_with_roman_track_numbers(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'22 Apr 2014') release_event.set_country(u'US') expected.append_release_event(release_event) expected.set_format(u'CD, Album, Deluxe Edition, Target Edition') label_id = expected.create_label_id() label_id.set_label(u'Goodbye Records') label_id.append_catalogue_nr(u'GLS-0161-02') expected.append_label_id(label_id) label_id = expected.create_label_id() label_id.set_label(u'Glassnote') label_id.append_catalogue_nr(u'GLS-0161-02') expected.append_label_id(label_id) expected.set_title(u'The Bones Of What You Believe') artist = expected.create_artist() artist.set_name(u'Chvrches') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_genre(u'Pop') expected.append_style(u'Indie Pop') expected.append_style(u'Synth-pop') expected.set_url(u'http://www.discogs.com/Chvrches-The-Bones-Of-What-You-Believe/release/5622231') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'I') track.set_title(u'The Mother We Share') track.set_length(192) disc.append_track(track) track = disc.create_track() track.set_number(u'II') track.set_title(u'We Sink') track.set_length(214) disc.append_track(track) track = disc.create_track() track.set_number(u'III') track.set_title(u'Gun') track.set_length(234) disc.append_track(track) track = disc.create_track() track.set_number(u'IV') track.set_title(u'Tether') track.set_length(286) disc.append_track(track) track = disc.create_track() track.set_number(u'V') track.set_title(u'Lies') track.set_length(221) disc.append_track(track) track = disc.create_track() track.set_number(u'VI') track.set_title(u'Under The Tide') track.set_length(272) disc.append_track(track) track = disc.create_track() track.set_number(u'VII') track.set_title(u'Recover') track.set_length(226) disc.append_track(track) track = disc.create_track() track.set_number(u'VIII') track.set_title(u'Night Sky') track.set_length(231) disc.append_track(track) track = disc.create_track() track.set_number(u'IX') track.set_title(u'Science/Visions') track.set_length(238) disc.append_track(track) track = disc.create_track() track.set_number(u'X') track.set_title(u'Lungs') track.set_length(183) disc.append_track(track) track = disc.create_track() track.set_number(u'XI') track.set_title(u'By The Throat') track.set_length(249) disc.append_track(track) track = disc.create_track() track.set_number(u'XII') track.set_title(u'You Caught The Light') track.set_length(337) disc.append_track(track) track = disc.create_track() track.set_number(u'XIII') track.set_title(u'Recover (Alucard Session)') track.set_length(252) disc.append_track(track) track = disc.create_track() track.set_number(u'XIV') track.set_title(u'The Mother We Share (Alucard Session)') track.set_length(198) disc.append_track(track) track = disc.create_track() track.set_number(u'XV') track.set_title(u'Gun (Alucard Session)') 
track.set_length(265) disc.append_track(track) track = disc.create_track() track.set_number(u'XVI') track.set_title(u'Tightrope') track.set_length(209) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Chvrches-The-Bones-Of-What-You-Believe/release/5622231') r = s.get_result() self.assertEqual(expected, r) def test_featuring_track_artist_in_artist_column(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2014') release_event.set_country(u'Portugal') expected.append_release_event(release_event) expected.set_format(u'3 \xd7 CD, Compilation') label_id = expected.create_label_id() label_id.set_label(u'Vidisco') label_id.append_catalogue_nr(u'11.80.9534') expected.append_label_id(label_id) expected.set_title(u'Caribe Grande \xcaxitos 2014') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_genre(u'Latin') expected.append_genre(u'Pop') expected.append_genre(u'Folk') expected.append_genre(u'World') expected.append_genre(u'Country') expected.append_style(u'African') expected.append_style(u'Electro House') expected.append_style(u'Forr\xf3') expected.append_style(u'Latin') expected.append_style(u'House') expected.set_url(u'http://www.discogs.com/Various-Caribe-Grande-%C3%8Axitos-2014/release/5586877') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Show Das Poderosas') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Kelly Pink') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u"Me Agarra So' No Uhm") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Landrick') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Mi Ni\xf1a') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Pedro Amorim') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'David Miks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Menina Loka') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Jey V') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Y2K') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Meu Eu Em Vo\xe7\xea') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Tayla Riddel') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'N\xe3o P\xe1ra (Ela S\xf3 Quer Dan\xe7ar)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u"Johne D'luka") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Bam Bam Bam') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Rogerinho') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Vem A Mi') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Bheaven') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Pecado') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Irm\xe3os Verdades') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'We Gonna Party') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Pedro Amorim') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Daduh King') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Gao Percussion') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Ela \xc9 Top') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Mc Bola') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Mc Rodriguez') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Love Love') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'David Miks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u"R'Bros") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Vamos Zuar') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Bryan Wilson') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = 
expected.create_artist() track_artist.set_name(u'Sebastian Crayn') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Mc Bola') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Bailando Asi') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ BodySoul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Hugo Bessa') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Jay Laroye') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Stape') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Quem \xc9 Essa Mulher') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Valdemiro Jos\xe9') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Matias Dam\xe1sio') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Ensa\xf1ame (Conexi\xf3n)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Carlitos Rossy') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Pipe Calderon') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'El Cata') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Nova La Amenaza') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'M\xe1s') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'17') track.set_title(u'Hoje N\xe3o Saio Daqui (Oh Tcha Tcharara)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'3 Beatz Muzik') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'18') track.set_title(u'Bailando') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Mike Moonnight') 
track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Mark F') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Vic J') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Alex B') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'19') track.set_title(u'Noche De Fiesta') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Jose Delgado') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'David Miks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'20') track.set_title(u'Beijo Bom') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Hallux') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Marcus') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Lilian Raquel') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'21') track.set_title(u'Mexe Assim') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Bodysoul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'22') track.set_title(u'Malandro') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Jey V') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'NGA') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Trair A Minha Namorada (Hoje Eu Quero Trair) (DJ Bruno F Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'MC Ricardo') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'DJ Bruno F') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') 
track.set_title(u'Quem Bate') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u"R'Bros") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Taty Agressivo') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Bango') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Jay Lima') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Otro Dia (Mastkisoul Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Gregor Salto') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Kit (Kuenta / Tambu)') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Mastiksoul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Mina Loca') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Bodytalk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'DJ Tiago') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Pm Akordeon') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Thiago Martins') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'I Found You (The Spacemakers Dirty Radio Edit)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Bryan Wilson') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Sebastian Crayn') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Quero Bail\xe1 (Mastik Jay Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Jey V') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Ademar') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) 
disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Morena (Massivedrum Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Franklin Rodriques') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'William') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Massivedrum') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Stronger (DJ Bruno F Radio Edit)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Da Fonseca') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Jay Lion') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Daniela Pimenta') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Bruno Soares Sax') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'DJ Bruno F') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Xibita (DJ Mike C Radio Edit)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Meith') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Y.a.m.a') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Mc Guy H.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'DJ Mike C') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Africanism') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Alvaro Corz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'M\xfasica') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Bryan Dalton') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') 
track.set_title(u'Te Voy Amar (Soul Beatz Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Pedrito') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Mike Moonnight') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Vic J') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Players') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Brian Chundro') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Santos') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Marlldexx') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Vem Rebolando') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'James Noyer') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Vale A Pena 2k14') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'D-Rashid') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Praia Del Sol') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'17') track.set_title(u'Dan\xe7a Do Tchira') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Mika G') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'18') track.set_title(u'Bagulho') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Mike C') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'19') track.set_title(u'Nrg') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Glowinthedark') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Chuckie') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(3) disc.set_title(None) track = disc.create_track() 
track.set_number(u'1') track.set_title(u'Mila') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Banda Mar Azul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Canto Da Cidade') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Banda Mar Azul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Beleza Rara') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Banda Mar Azul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Chorando Se Foi') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Bahia Tropical') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Amor Perfeito') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Ax\xe9 Bahia') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Ranpuzel') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Banda Mar Azul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Sorte Grande') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Bahia Pagode Tropical') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Saia E Bicicletinha') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Banda Ka\xe7amba') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'T\xf4 Nem A\xed') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Daniela') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Sozinho "Dance "') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Banda Mar Azul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) # TODO: decide how to handle this weird Discogs track numbering corner case # track = disc.create_track() # track.set_number(u'11') # track.set_title(u'Pout-Pourri 1') # track.set_length(None) # track_artist = expected.create_artist() # 
track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) # # track = disc.create_track() # track.set_number(u'12') # track.set_title(u'Pout-Pourri 2') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) # # track = disc.create_track() # track.set_number(u'13') # track.set_title(u'Pout-Pourri 3 (Marchas)') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) # # track = disc.create_track() # track.set_number(u'14') # track.set_title(u'Pout-Pourri 4') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) # # track = disc.create_track() # track.set_number(u'15') # track.set_title(u'Pout-Pourri 5') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) # # track = disc.create_track() # track.set_number(u'16') # track.set_title(u'Los Mayos- Disco Samba') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) # # track = disc.create_track() # track.set_number(u'17') # track.set_title(u'Pout-porri 6') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) # # track = disc.create_track() # track.set_number(u'18') # track.set_title(u'Pout-porri 7') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) track = disc.create_track() track.set_number(u'19') track.set_title(u'Marcha Do Cord\xe3o Do Bola Preta') track.set_length(None) disc.append_track(track) # track = disc.create_track() # track.set_number(u'20') # track.set_title(u'Pout-porri 8') # track.set_length(None) # track_artist = expected.create_artist() # track_artist.set_name(u'Unknown Artist') # track_artist.set_various(False) # track_artist.append_type(expected.ArtistTypes.MAIN) # track.append_artist(track_artist) # disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Caribe-Grande-%C3%8Axitos-2014/release/5586877') r = s.get_result() self.assertEqual(expected, r) def test_featuring_in_artist_and_track_column(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = 
expected.create_release_event() release_event.set_date(u'20 Nov 1996') release_event.set_country(u'Germany') expected.append_release_event(release_event) expected.set_format(u'2 \xd7 CD, Compilation') label_id = expected.create_label_id() label_id.set_label(u'Sony Music Media') label_id.append_catalogue_nr(u'SMM 486760 2') expected.append_label_id(label_id) label_id = expected.create_label_id() label_id.set_label(u'Sony Music Media') label_id.append_catalogue_nr(u'486760 2') expected.append_label_id(label_id) expected.set_title(u'Dream Dance Vol. 3') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_style(u'Trance') expected.set_url(u'http://www.discogs.com/Various-Dream-Dance-Vol-3/release/135664') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Salva Mea (Radio Edit)') track.set_length(224) track_artist = expected.create_artist() track_artist.set_name(u'Faithless') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'A Neverending Dream (Dream Dance Anthem Mix)') track.set_length(221) track_artist = expected.create_artist() track_artist.set_name(u'Trance X') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'7 Seconds (Radio-Video-Single)') track.set_length(248) track_artist = expected.create_artist() track_artist.set_name(u'Nomansland') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'One And One (Space Edit)') track.set_length(218) track_artist = expected.create_artist() track_artist.set_name(u'Ronald Snypes') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Sweet Memories (Radio Edit)') track.set_length(234) track_artist = expected.create_artist() track_artist.set_name(u'Groove Solution') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Fall Down On Me (Zhi-Vago In Mission Radio Edit)') track.set_length(245) track_artist = expected.create_artist() track_artist.set_name(u'Solid') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Zhi-Vago') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Cybertrance') track.set_length(252) track_artist = expected.create_artist() track_artist.set_name(u'Blue Alphabet') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) 
track = disc.create_track() track.set_number(u'8') track.set_title(u'Beautiful Place (Airwaves Mix)') track.set_length(204) track_artist = expected.create_artist() track_artist.set_name(u'Paul van Dyk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Floating (7\u2033-Mix)') track.set_length(249) track_artist = expected.create_artist() track_artist.set_name(u'Terra Ferma') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'The Nighttrain (Dream Station Remix)') track.set_length(369) track_artist = expected.create_artist() track_artist.set_name(u'Kadoc') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Silencer I (Extended Mix)') track.set_length(336) track_artist = expected.create_artist() track_artist.set_name(u'DJ The Crow') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title("Insomniak: I'll Be Your Nightmare (Industrial Mix)") track.set_length(341) track_artist = expected.create_artist() track_artist.set_name(u'The Grooveman') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Masterpiece') track.set_length(238) track_artist = expected.create_artist() track_artist.set_name(u'P-Casso') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'My Dimension (Radio Edit)') track.set_length(205) track_artist = expected.create_artist() track_artist.set_name(u'DJ Panda') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Secret World (Radio Mix)') track.set_length(222) track_artist = expected.create_artist() track_artist.set_name(u'Vector Mode') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Secret Love (Single Edit)') track.set_length(234) track_artist = expected.create_artist() track_artist.set_name(u'Magnetic Pulstar') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'17') track.set_title(u'Atlanta (Sunshine State Club Extravaganza)') track.set_length(402) track_artist = expected.create_artist() track_artist.set_name(u'Sunshine State') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Snake Davis') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'18') track.set_title("Walk On By (JPO's & Beam's Radio Mix)") track.set_length(236) track_artist = expected.create_artist() track_artist.set_name(u'M.R.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'JPO & Beam') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Up To No Good (Radio Edit)') track.set_length(210) track_artist = expected.create_artist() track_artist.set_name(u'Porn Kings') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Reality (Too Short Video Radio)') track.set_length(231) track_artist = expected.create_artist() track_artist.set_name(u'RMB') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Da Beat Goes\u2026 (Radio Mix)') track.set_length(228) track_artist = expected.create_artist() track_artist.set_name(u'Red 5') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title("Why Don't You Dance With Me (Phuture Mix)") track.set_length(204) track_artist = expected.create_artist() track_artist.set_name(u'Futura Nostra') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Love And Fate (Part 2)') track.set_length(296) track_artist = expected.create_artist() track_artist.set_name(u'Love And Fate') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Go (Woodtick Mix)') track.set_length(323) track_artist = expected.create_artist() track_artist.set_name(u'Moby') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Waters (Phase 2)') track.set_length(320) track_artist = expected.create_artist() track_artist.set_name(u'Taucher') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Do You See The Light (Dance 2 Trance Mix)') track.set_length(481) track_artist = expected.create_artist() track_artist.set_name(u'Snap!') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Dance 2 Trance') 
track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Right In The Night (Fall In Love With Music) (Microbots Remix)') track.set_length(383) track_artist = expected.create_artist() track_artist.set_name(u'Jam & Spoon') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Plavka') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Microbots') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Te Quierro (Trance Mix)') track.set_length(331) track_artist = expected.create_artist() track_artist.set_name(u'Intrance Feat. D-Sign') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Hablando (Acordeon Mix)') track.set_length(391) track_artist = expected.create_artist() track_artist.set_name(u'Ramirez') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Pizarro') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Outsiders (Marusha 7\u2033 Edit)') track.set_length(240) track_artist = expected.create_artist() track_artist.set_name(u'Yves Deruyter') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Marusha') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'The Wildlife') track.set_length(217) track_artist = expected.create_artist() track_artist.set_name("Mijk's Magic Marble Box") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Rock Your Body (Radio Mix)') track.set_length(221) track_artist = expected.create_artist() track_artist.set_name(u'The M. 
Experience III') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title("It's A Dream Song") track.set_length(228) track_artist = expected.create_artist() track_artist.set_name(u'DJ Hooligan') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Rhapsody In E') track.set_length(363) track_artist = expected.create_artist() track_artist.set_name(u'Scooter') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Dream-Dance-Vol-3/release/135664') r = s.get_result() self.assertEqual(expected, r) def test_special_track_row_class(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'1999') release_event.set_country(u'UK') expected.append_release_event(release_event) expected.set_format(u'CD, Album') label_id = expected.create_label_id() label_id.set_label(u'Red Wharf') label_id.append_catalogue_nr(u'RWCD004') expected.append_label_id(label_id) expected.set_title(u'Pilgrim') artist = expected.create_artist() artist.set_name(u'Graham Bowers') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_genre(u'Jazz') expected.append_style(u'Modern Classical') expected.set_url(u'http://www.discogs.com/Graham-Bowers-Pilgrim/release/728845') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1a') track.set_title(u'Unconditional') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'1b') track.set_title(u'Loss Of Innocence') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number(u'1c') track.set_title(u'Mechanistics') track.set_length(None) disc.append_track(track) expected.append_disc(disc) s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Graham-Bowers-Pilgrim/release/728845') r = s.get_result() self.assertEqual(expected, r) def test_multiple_special_artists_for_same_track(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2000') release_event.set_country(u'UK') expected.append_release_event(release_event) expected.set_format(u'3 \xd7 CD, Mixed') label_id = expected.create_label_id() label_id.set_label(u'Pure Silk Records') label_id.append_catalogue_nr(u'PURESCD3') expected.append_label_id(label_id) expected.set_title(u'Pure Silk: The Third Dimension') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre(u'Electronic') expected.append_style(u'UK Garage') expected.set_url(u'http://www.discogs.com/Various-Pure-Silk-The-Third-Dimension/release/463634') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Sunshine (Wookie Main Mix)') track.set_length(None) 
track_artist = expected.create_artist() track_artist.set_name(u'Gabrielle') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Wookie') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Baby Gonna Rock Dis (Original Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Chris Mack') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Nuts') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Girls Like Us') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'B-15 Project') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Crissy D') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Lady G') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Streetlife (Original 12" Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Daryl B') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Mark Yardley') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title("Don't Waste My Time (4 Beat Mix)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'The Wideboys') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Everybody Come On (Stanton Warriors Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Skribble') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Stanton Warriors') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Get Loose') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Double G') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Right Before My Eyes (The 
Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name("N'n'G") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Neat') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'What Ya Gonna Do') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Artful Dodger') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title("You Don't Know (Marvel & Eli Remix)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'702') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Marvel & Eli') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'I Keep') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name("N'n'G") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Spirit Of The Sun (Bump & Flex Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Lenny Fontana') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Bump & Flex') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Celebrate Life (Zed Bias Vocal Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Brasstooth') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Sean Mitchell') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Zed Bias') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title("Baby (You're So Sexy) (Dub)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Dem 2') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Glad You Came To Me (Steve Gurley VIP Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'United Grooves Collective') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Steve Gurley') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Find The Path (Sweet Release Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'New Horizons') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'17') track.set_title(u'Give Some To Me (Masterplan)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Suiceyed') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'18') track.set_title(u'That Sound') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Michael Moog') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Sweeter Than Wine') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Dionne Rakeem') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Dreem House Productions') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Feel It') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name("London's Unique 3") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Neighbourhood') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Zed Bias') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Rumpus') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Madness On The Street') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Richie Boy') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'DJ Klasse') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title("Jump 'n' Shout (Dem 2 Mix)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Basement 
Jaxx') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Dem 2') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title("Somebody Else's Guy (Stanton Warriors Vocal Mix)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Jocelyn Brown') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Stanton Warriors') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Let Me Know') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'K.C.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Zed Bias') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'For Real') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'M Dubs') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'J.P.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Tingles 2000 (Zed Bias Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Valerie M') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Zed Bias') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title("Buddy X '99 (Original Dreem Teem Dub Mix)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Dreem Teem') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Neneh Cherry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Apparently Nothing (Artful Dodger Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'The Brand New Heavies') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Artful Dodger') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) 
track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Sometimes It Snows In April (Dreem House Dub Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Amar') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Ranking') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Dreem House Productions') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Down On Me') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Wookie') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Hold Me Tight') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Lewi') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Beautiful (Dreem House Dub Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Groove Control') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Dreem House Productions') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Call It Fate') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Richie Dan') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'17') track.set_title(u'A Little Bit Of Luck') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Luck & MC Neat') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'18') track.set_title(u'I Want You') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Rosie Gaines') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(3) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Down On Me') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Wookie') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Everybody Come On (Stanton 
Warriors Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'DJ Skribble') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Stanton Warriors') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'I Keep') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name("N'n'G") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Celebrate Life (Zed Bias Vocal Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Brasstooth') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Sean Mitchell') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Zed Bias') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Sunshine (Wookie Main Mix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Gabrielle') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Wookie') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Neighbourhood') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Zed Bias') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Rumpus') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Give Some To Me (Masterplan)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Suiceyed') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title("You Don't Know (Marvel & Eli Remix)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'702') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Marvel & Eli') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title("Somebody 
Else's Guy (Stanton Warriors Filthy Silk Dub)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Jocelyn Brown') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Stanton Warriors') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Right Before My Eyes (The Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name("N'n'G") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Neat') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Baby Gonna Rock Dis') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Chris Mack') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'MC Nuts') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Girls Like Us') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'B-15 Project') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Crissy D') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Lady G') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title("Jump 'n' Shout (Dem 2 Mix)") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Basement Jaxx') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Dem 2') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Spirit Of The Sun (Bump & Flex Remix)') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name(u'Lenny Fontana') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Bump & Flex') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Glad You Came To Me (Steve Gurley VIP Mix)') track.set_length(None) track_artist = 
expected.create_artist()
        track_artist.set_name(u'United Grooves Collective')
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        track_artist = expected.create_artist()
        track_artist.set_name(u'Steve Gurley')
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.REMIXER)
        track.append_artist(track_artist)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'16')
        track.set_title(u'Call It Fate')
        track.set_length(None)
        track_artist = expected.create_artist()
        track_artist.set_name(u'Richie Dan')
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        disc.append_track(track)
        expected.append_disc(disc)
        s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Pure-Silk-The-Third-Dimension/release/463634')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_weird_subtracks_in_tracklist(self):
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        release_event = expected.create_release_event()
        release_event.set_date(u'2005')
        release_event.set_country(u'Netherlands')
        expected.append_release_event(release_event)
        expected.set_format(u'CD, Compilation')
        label_id = expected.create_label_id()
        label_id.set_label(u'Stubko Entertainment')
        label_id.append_catalogue_nr(u'255034')
        expected.append_label_id(label_id)
        expected.set_title(u'De Beste Liedjes Van Ome Henk')
        artist = expected.create_artist()
        artist.set_name(u'Ome Henk')
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
        expected.append_genre(u'Electronic')
        expected.append_genre(u'Non-Music')
        expected.append_genre(u'Pop')
        expected.append_style(u'Comedy')
        expected.append_style(u'Euro House')
        expected.append_style(u'Parody')
        expected.set_url('http://www.discogs.com/Ome-Henk-De-Beste-Liedjes-Van-Ome-Henk/release/755732')
        disc = expected.create_disc()
        disc.set_number(1)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number(u'1')
        track.set_title(u'Het Is Weer Tijd Voor Ome Henk!
(Radio Tune)') track.set_length(85) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Opblaaskrokodil (Super Extra Mix)') track.set_length(148) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Op De Camping') track.set_length(213) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Ik Zing Dit Lied Voor Ome Henk!') track.set_length(218) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Neem Een Ander In De Maling!') track.set_length(198) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Mambo Nr. 6') track.set_length(219) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Heftig!') track.set_length(225) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Lekker Lekker (Ga Maar Met Me Mee)') track.set_length(213) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Samba La Bamba!') track.set_length(184) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u"'T Leven Gaat Niet Over Rozen!") track.set_length(213) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Sieb van der Kast') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Ome Henk Viert 
Feest!') track.set_length(236) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Sambal Bij?') track.set_length(175) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Ik Ben Verkouwe!') track.set_length(234) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Sju Tem') track.set_length(195) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Kim Holland') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'Ploem Ploem Jenka (Hup Hop Versie Met Trea Dobbs)') track.set_length(162) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Trea Dobbs') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Aaai Oehoe Aai') track.set_length(191) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'17') track.set_title(u'Oranje!! (We Worden Kampioen!) (Radio Mix)') track.set_length(223) track_artist = expected.create_artist() track_artist.set_name(u'Ome Henk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'18') track.set_title(u'Olee Olee Sinterklaas Is Here To Stay! 
(Single Versie)')
        track.set_length(236)
        track_artist = expected.create_artist()
        track_artist.set_name(u'Ome Henk')
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'19')
        track.set_title(u'Een Heel Gelukkig Kerstfeest')
        track.set_length(214)
        track_artist = expected.create_artist()
        track_artist.set_name(u'Ome Henk')
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'20')
        track.set_title(u'Opblaaskrokodil 2005 (Bonustrack)')
        track.set_length(147)
        track_artist = expected.create_artist()
        track_artist.set_name(u'Ome Henk')
        track_artist.set_various(False)
        track_artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(track_artist)
        disc.append_track(track)
        expected.append_disc(disc)
        s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Ome-Henk-De-Beste-Liedjes-Van-Ome-Henk/release/755732')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_404(self):
        expected = NotFoundResult()
        expected.set_scraper_name(None)
        s = discogs.ReleaseScraper.from_string('http://www.discogs.com/Various-Gothic-File-14/release/999999999')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_search_scraper(self):
        s = discogs.SearchScraper('love')
        r = s.get_result()
        self.assertTrue(len(r.get_items()) > 0)


class MusicbrainzTest(TestCase):
    def test_simple_album(self):
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        release_event = expected.create_release_event()
        release_event.set_date('2004-09-23')
        release_event.set_country('Germany')
        expected.append_release_event(release_event)
        expected.set_format('CD, Album')
        label_id = expected.create_label_id()
        label_id.set_label('Trisol')
        label_id.append_catalogue_nr('TRI 070 CD')
        expected.append_label_id(label_id)
        expected.set_title('Hast Du mich vermisst?
Der schwarze Schmetterling, Teil I') artist = expected.create_artist() artist.set_name('ASP') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/e008606b-a1c9-48ab-8011-5dbf8b874f1b') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Intro: In meiner Vorstellung') track.set_length(274) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Schwarzer Schmetterling') track.set_length(290) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Where Do the Gods Go') track.set_length(226) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Dancing') track.set_length(345) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'K\xfcss mich') track.set_length(311) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Sing Child') track.set_length(238) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Teach Me War') track.set_length(225) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Imbecile Anthem') track.set_length(222) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Und wir tanzten (Ungeschickte Liebesbriefe)') track.set_length(304) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Blinded') track.set_length(444) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Where Do the Gods Go (re-unleashed club edit)') track.set_length(279) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/e008606b-a1c9-48ab-8011-5dbf8b874f1b') r = s.get_result() self.assertEqual(expected, r) def test_multiple_cds(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2007-05-25') release_event.set_country('Germany') expected.append_release_event(release_event) expected.set_format(u'4\xd7CD, Album + Live') label_id = expected.create_label_id() label_id.set_label('Trisol') label_id.append_catalogue_nr('TRI 303 CD') expected.append_label_id(label_id) expected.set_title('Once in a Lifetime') artist = expected.create_artist() artist.set_name('ASP') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name('Chamber') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/79de4a0c-b469-4dfd-b23c-129462b741fb') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Once in a Lifetime, Part 1') track.set_length(351) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'A Dead Man\u2019s Song') track.set_length(312) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Versuchung') track.set_length(345) disc.append_track(track) track = disc.create_track() track.set_number(u'4') 
track.set_title(u'Torn') track.set_length(304) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Demon Love') track.set_length(272) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'The Paperhearted Ghost') track.set_length(283) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'A Tale of Real Love') track.set_length(316) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Hunger') track.set_length(289) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'The Truth About Snow-White') track.set_length(240) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'She Wore Shadows') track.set_length(276) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Und wir tanzten (Ungeschickte Liebesbriefe)') track.set_length(317) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Once in a Lifetime, Part 2 (reprise)') track.set_length(164) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'K\xfcss mich') track.set_length(384) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Silence - Release') track.set_length(225) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Solitude') track.set_length(220) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Die Ballade von der Erweckung') track.set_length(527) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Another Conversation') track.set_length(201) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Sing Child') track.set_length(449) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Ich will brennen') track.set_length(300) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Toscana') track.set_length(374) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Ride On') track.set_length(222) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Hometown') track.set_length(181) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Werben') track.set_length(293) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Once in a Lifetime, Part 3 (Finale)') track.set_length(608) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(3) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'H\xe4sslich') track.set_length(145) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Backstage (All Areas)') track.set_length(573) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Paracetamoltr\xe4ume') track.set_length(517) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Auszug aus \u201eTremendista\u201c') track.set_length(1473) 
track_artist = expected.create_artist() track_artist.set_name(u'Ralph M\xfcller') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Campari O') track.set_length(159) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(4) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Sicamore Trees (ASP soundcheck out-take)') track.set_length(94) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'Demon Love') track.set_length(275) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'The Truth About Snow-White') track.set_length(275) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'She Wore Shadows') track.set_length(319) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Sing Child') track.set_length(469) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Hometown') track.set_length(221) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Hunger') track.set_length(274) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Silence - Release') track.set_length(208) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'She Moved Through the Fair (ASP soundcheck out-take)') track.set_length(120) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/79de4a0c-b469-4dfd-b23c-129462b741fb') r = s.get_result() self.assertEqual(expected, r) def test_various_artists_and_track_remixer(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2010') release_event.set_country('Germany') expected.append_release_event(release_event) expected.set_format('CD, Album + Compilation') label_id = expected.create_label_id() label_id.set_label('Batbeliever Releases') label_id.append_catalogue_nr('BAT 065') expected.append_label_id(label_id) expected.set_title('Gothic File 11') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/9d78a55c-0eee-4b61-b6eb-b69765c37740') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Carrie Satan') track.set_length(312) track_artist = expected.create_artist() track_artist.set_name('Spectra Paris') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Countdown') track.set_length(253) track_artist = expected.create_artist() track_artist.set_name('Absurd Minds') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'M\xe4dchen in Uniform (Faderhead remix)') track.set_length(233) track_artist = expected.create_artist() 
track_artist.set_name('Nachtmahr') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Faderhead') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Fucking Invective') track.set_length(273) track_artist = expected.create_artist() track_artist.set_name('Noisuf-X') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Loyal to My Hate (Solar Fake remix)') track.set_length(264) track_artist = expected.create_artist() track_artist.set_name(':wumpscut:') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Solar Fake') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Melancholie (382edit)') track.set_length(232) track_artist = expected.create_artist() track_artist.set_name('KiEw') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Gegen die Welt') track.set_length(287) track_artist = expected.create_artist() track_artist.set_name('Mantus') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title("Ready or Not (I'm Coming)") track.set_length(202) track_artist = expected.create_artist() track_artist.set_name('Oomph!') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('What?') track.set_length(166) track_artist = expected.create_artist() track_artist.set_name('Rob Zombie') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Ebenbild (Die Krupps remix)') track.set_length(343) track_artist = expected.create_artist() track_artist.set_name('Megaherz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Die Krupps') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Vergissmeinnicht (live)') track.set_length(239) track_artist = expected.create_artist() track_artist.set_name('Eisbrecher') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Industrypeople') track.set_length(254) track_artist = expected.create_artist() 
track_artist.set_name('Zeromancer') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Kick the Bass') track.set_length(222) track_artist = expected.create_artist() track_artist.set_name('Julien-K') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Black Hole') track.set_length(325) track_artist = expected.create_artist() track_artist.set_name('Nosferatu') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Swimming in Dirty Water') track.set_length(264) track_artist = expected.create_artist() track_artist.set_name('Die Art') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('Wreckhouse Stomp') track.set_length(184) track_artist = expected.create_artist() track_artist.set_name('Mad Sin') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/9d78a55c-0eee-4b61-b6eb-b69765c37740') r = s.get_result() self.assertEqual(expected, r) def test_disc_titles(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2008') release_event.set_country('Europe') expected.append_release_event(release_event) expected.set_format(u'5\xd7CD, Album + Compilation') label_id = expected.create_label_id() label_id.set_label('Epic') label_id.append_catalogue_nr('88697304842') expected.append_label_id(label_id) expected.set_title('Original Album Classics') artist = expected.create_artist() artist.set_name('The Isley Brothers') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/12c94a0f-828f-4ab3-8e0d-dfe4599dc310') disc = expected.create_disc() disc.set_number(1) disc.set_title('The Brothers: Isley') track = disc.create_track() track.set_number('1') track.set_title('I Turned You On') track.set_length(158) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Vacuum Cleaner') track.set_length(176) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('I Got to Get Myself Together') track.set_length(218) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Was It Good to You?') track.set_length(164) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('The Blacker the Berry (a.k.a. 
Black Berries)') track.set_length(353) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('My Little Girl') track.set_length(221) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Get Down Off of the Train') track.set_length(192) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Holding On') track.set_length(156) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Feels Like the World') track.set_length(206) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title('Get Into Something') track = disc.create_track() track.set_number('1') track.set_title('Get Into Something') track.set_length(450) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Freedom') track.set_length(218) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Take Inventory') track.set_length(167) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title("Keep on Doin'") track.set_length(242) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Girls Will Be Girls') track.set_length(171) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('I Need You So') track.set_length(265) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('If He Can You Can') track.set_length(225) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('I Got to Find Me One') track.set_length(278) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Beautiful') track.set_length(186) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Bless Your Heart') track.set_length(183) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(3) disc.set_title("Givin' It Back") track = disc.create_track() track.set_number('1') track.set_title('Ohio - Machine Gun') track.set_length(554) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Fire and Rain') track.set_length(329) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Lay Lady Lay') track.set_length(622) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Spill the Wine') track.set_length(392) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Nothing to Do But Today') track.set_length(219) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Cold Bologna') track.set_length(179) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title("Love the One You're With") track.set_length(219) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(4) disc.set_title('Brother, Brother, Brother') track = disc.create_track() track.set_number('1') track.set_title('Brother, Brother') track.set_length(197) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Put A Little Love In Your Heart') track.set_length(182) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title("Sweet 
Season / Keep On Walkin'") track.set_length(313) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Work To Do') track.set_length(192) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Pop That Thang') track.set_length(174) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Lay Away') track.set_length(203) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title("It's Too Late") track.set_length(631) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Love Put Me On The Corner') track.set_length(390) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(5) disc.set_title('3 + 3') track = disc.create_track() track.set_number('1') track.set_title('That Lady, Parts 1 & 2') track.set_length(335) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title("Don't Let Me Be Lonely Tonight") track.set_length(239) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('If You Were There') track.set_length(203) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('You Walk Your Way') track.set_length(186) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Listen to the Music') track.set_length(246) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('What It Comes Down To') track.set_length(234) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Sunshine (Go Away Today)') track.set_length(262) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Summer Breeze') track.set_length(372) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('The Highways of My Life') track.set_length(293) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('That Lady (live)') track.set_length(222) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/12c94a0f-828f-4ab3-8e0d-dfe4599dc310') r = s.get_result() self.assertEqual(expected, r) def test_special_sub_heading(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2012-11-16') release_event.set_country('Germany') expected.append_release_event(release_event) expected.set_format(u'2\xd7CD, EP') label_id = expected.create_label_id() label_id.set_label('Trisol') label_id.append_catalogue_nr('TRI 460 CD') expected.append_label_id(label_id) expected.set_title('Die GeistErfahrer EP: Fremder-Zyklus, Teil 1.1') artist = expected.create_artist() artist.set_name('ASP') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/fc6ee7a8-c70a-4c8f-ab42-43a457a0731f') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'GeistErfahrer') track.set_length(360) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'In Sack und Asche') track.set_length(440) disc.append_track(track) track = disc.create_track() track.set_number(u'3') 
        track.set_title(u'\xdcberH\xe4rte')
        track.set_length(376)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'4')
        track.set_title(u'Carpe noctem')
        track.set_length(312)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'5')
        track.set_title(u'Weichen(t)stellung (GeistErfahrer Reprise)')
        track.set_length(274)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'6')
        track.set_title(u'Danach')
        track.set_length(516)
        disc.append_track(track)
        expected.append_disc(disc)
        disc = expected.create_disc()
        disc.set_number(2)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number(u'1')
        track.set_title(u'Sing Child')
        track.set_length(404)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'2')
        track.set_title(u'Duett (Minnelied der Incubi)')
        track.set_length(251)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'3')
        track.set_title(u'Krabat')
        track.set_length(358)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'4')
        track.set_title(u'Unverwandt')
        track.set_length(667)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number(u'5')
        track.set_title(u'Werben')
        track.set_length(440)
        disc.append_track(track)
        expected.append_disc(disc)
        s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/fc6ee7a8-c70a-4c8f-ab42-43a457a0731f')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_release_group(self):
        expected = ListResult()
        expected.set_scraper_name(None)
        item = expected.create_item()
        item.set_name(u'ASP & Chamber \u2013 Humility')
        item.set_info(u'CD | Tracks: 8 | Date: 2006-04-21 | Country: DE | Barcode: 4260063942730')
        item.set_query('https://musicbrainz.org/release/58bad121-bfab-4dda-89f8-4b1bc092de44')
        item.set_url('https://musicbrainz.org/release/58bad121-bfab-4dda-89f8-4b1bc092de44')
        expected.append_item(item)
        item = expected.create_item()
        item.set_name(u'ASP & Chamber \u2013 Humility: Die verschollenen Archive 2')
        item.set_info(u'CD | Tracks: 7 | Date: 2012-03-09 | Country: DE | Barcode: 4260063944505')
        item.set_query('https://musicbrainz.org/release/c2834b8b-77c4-4505-9b55-a31208eb98c3')
        item.set_url('https://musicbrainz.org/release/c2834b8b-77c4-4505-9b55-a31208eb98c3')
        expected.append_item(item)
        s = musicbrainz.ReleaseGroupScraper.from_string('http://musicbrainz.org/release-group/9c8ec90f-dcef-3fc7-904d-27f535454e44')
        r = s.get_result()
        self.assertEqual(expected, r)

    def test_vinyl_not_cd_in_sub_heading(self):
        expected = ReleaseResult()
        expected.set_scraper_name(None)
        release_event = expected.create_release_event()
        release_event.set_date('2004-02-24')
        release_event.set_country('United States')
        expected.append_release_event(release_event)
        expected.set_format('12" Vinyl, Album')
        label_id = expected.create_label_id()
        label_id.set_label('Sundazed Music')
        label_id.append_catalogue_nr('LP 5103')
        expected.append_label_id(label_id)
        expected.set_title('Four Sail')
        artist = expected.create_artist()
        artist.set_name('Love')
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        expected.append_release_artist(artist)
        expected.set_url('http://musicbrainz.org/release/bdb4ba37-bb4b-3d2a-bd58-c109dc4d72f0')
        disc = expected.create_disc()
        disc.set_number(1)
        disc.set_title(None)
        track = disc.create_track()
        track.set_number('A1')
        track.set_title('August')
        track.set_length(300)
        disc.append_track(track)
        track = disc.create_track()
        track.set_number('A2')
        track.set_title("Your Friend and Mine - Neil's Song")
        track.set_length(220)
disc.append_track(track) track = disc.create_track() track.set_number('A3') track.set_title("I'm With You") track.set_length(165) disc.append_track(track) track = disc.create_track() track.set_number('A4') track.set_title('Good Times') track.set_length(210) disc.append_track(track) track = disc.create_track() track.set_number('A5') track.set_title('Singing Cowboy') track.set_length(270) disc.append_track(track) track = disc.create_track() track.set_number('B1') track.set_title('Dream') track.set_length(169) disc.append_track(track) track = disc.create_track() track.set_number('B2') track.set_title('Robert Montgomery') track.set_length(214) disc.append_track(track) track = disc.create_track() track.set_number('B3') track.set_title('Nothing') track.set_length(284) disc.append_track(track) track = disc.create_track() track.set_number('B4') track.set_title('Talking in My Sleep') track.set_length(170) disc.append_track(track) track = disc.create_track() track.set_number('B5') track.set_title('Always See Your Face') track.set_length(210) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/bdb4ba37-bb4b-3d2a-bd58-c109dc4d72f0') r = s.get_result() self.assertEqual(expected, r) def test_medium_in_sub_heading(self): expected = ReleaseResult() expected.set_scraper_name(None) expected.set_format('Album') expected.set_title('Welcome to the Dopehouse') artist = expected.create_artist() artist.set_name('The Dayton Family') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/0e3b3c85-61b6-4a07-852b-26f7e8dd0ade') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Intro') track.set_length(93) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Big Mac 11') track.set_length(276) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Do You Remember?') track.set_length(200) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Welcome to Flint') track.set_length(228) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Feds') track.set_length(237) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Gangstarism') track.set_length(251) track_artist = expected.create_artist() track_artist.set_name('Goldfish') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Young Thugs') track.set_length(241) track_artist = expected.create_artist() track_artist.set_name('Ghetto E') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Lori') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Drugstore') track.set_length(207) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Set Up') track.set_length(257) disc.append_track(track) track = disc.create_track() track.set_number('10') 
track.set_title('We Keep It Ghetto') track.set_length(200) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Dope House') track.set_length(231) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Shadows') track.set_length(242) track_artist = expected.create_artist() track_artist.set_name('Kalonda & Ryan') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Outlaws') track.set_length(226) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Weed Song') track.set_length(228) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Simple Wish') track.set_length(206) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('Outro') track.set_length(96) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/0e3b3c85-61b6-4a07-852b-26f7e8dd0ade') r = s.get_result() self.assertEqual(expected, r) def test_digital_media_in_sub_heading(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2013-04-08') release_event.set_country('Sweden') expected.append_release_event(release_event) expected.set_format('Digital Media, Album') expected.set_title(u'J\xe4rnb\xe4rarland') artist = expected.create_artist() artist.set_name('Stiko Per Larsson') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/d37fc56e-4b9b-4c4c-9e9e-5d6d5a66944c') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'Zombien f\xf6ds') track.set_length(167) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'J\xe4rnb\xe4rarland') track.set_length(219) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Veteraner') track.set_length(197) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'Resande Man') track.set_length(192) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Du h\xf6r inte hemma h\xe4r') track.set_length(185) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Irrbloss') track.set_length(187) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'J\xe4mtlands president') track.set_length(218) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Gilla falla') track.set_length(257) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Inga v\xe4gar') track.set_length(259) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'Krus & detaljer') track.set_length(241) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'En kosmonauts testamente') track.set_length(375) disc.append_track(track) expected.append_disc(disc) s = 
musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/d37fc56e-4b9b-4c4c-9e9e-5d6d5a66944c') r = s.get_result() self.assertEqual(expected, r) def test_other_in_sub_heading(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('1999') release_event.set_country('United States') expected.append_release_event(release_event) expected.set_format('Other, Album') label_id = expected.create_label_id() label_id.set_label('Epic') expected.append_label_id(label_id) expected.set_title('Retro Futuristo') artist = expected.create_artist() artist.set_name('Jack Herrera') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/020fc291-af3e-45d7-a2f3-212d42fc260b') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'City Lights') track.set_length(313) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'What U Feel') track.set_length(295) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'Diamond in the Rough') track.set_length(297) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'High Off You') track.set_length(299) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'Say You Gotta Man') track.set_length(262) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'Jack Shuffle (feat. Will.I.Am)') track.set_length(240) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'Up Above My Head') track.set_length(320) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'Jack Herrera for President') track.set_length(272) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'Free to Believe') track.set_length(329) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'For You') track.set_length(378) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'Silver & Gold (feat. 
Black Thought)') track.set_length(316) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'Revolution (interlude)') track.set_length(78) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'Revolution') track.set_length(315) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'Be Free') track.set_length(341) disc.append_track(track) track = disc.create_track() track.set_number(u'15') track.set_title(u'D\xe9ja Vu') track.set_length(218) disc.append_track(track) track = disc.create_track() track.set_number(u'16') track.set_title(u'Jewel') track.set_length(339) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/020fc291-af3e-45d7-a2f3-212d42fc260b') r = s.get_result() self.assertEqual(expected, r) def test_dvd_and_cd_in_sub_heading(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2009-01-14') release_event.set_country('Japan') expected.append_release_event(release_event) expected.set_format('CD + DVD-Video, Album + Compilation') label_id = expected.create_label_id() label_id.set_label(u'MusicRay\u2019n') label_id.append_catalogue_nr('SMCL-163') expected.append_label_id(label_id) label_id = expected.create_label_id() label_id.set_label(u'MusicRay\u2019n') label_id.append_catalogue_nr('SMCL-164') expected.append_label_id(label_id) expected.set_title('CODE GEASS COMPLETE BEST') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/f6fe7f52-b0c8-4bd8-af06-68af909e09ca') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 1') track.set_length(31) track_artist = expected.create_artist() track_artist.set_name(u'C.C.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'\u3086\u304b\u306a') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'COLORS') track.set_length(219) track_artist = expected.create_artist() track_artist.set_name(u'FLOW') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'\u52c7\u4fa0\u9752\u6625\u8b33') track.set_length(262) track_artist = expected.create_artist() track_artist.set_name(u'ALI PROJECT') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 2') track.set_length(35) track_artist = expected.create_artist() track_artist.set_name(u'C.C.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'\u3086\u304b\u306a') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'\u89e3\u8aad\u4e0d\u80fd') track.set_length(186) track_artist = expected.create_artist() track_artist.set_name(u'\u30b8\u30f3') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9') track.set_length(278) track_artist = expected.create_artist() track_artist.set_name(u'SunSet Swish') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'\u77b3\u30ce\u7ffc') track.set_length(212) track_artist = expected.create_artist() track_artist.set_name(u'access') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 3') track.set_length(35) track_artist = expected.create_artist() track_artist.set_name(u'C.C.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'\u3086\u304b\u306a') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'O2 \u301c\u30aa\u30fc\u30fb\u30c4\u30fc\u301c') track.set_length(238) track_artist = expected.create_artist() track_artist.set_name(u'ORANGE RANGE') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'\u30b7\u30a2\u30ef\u30bb\u30cd\u30a4\u30ed') track.set_length(259) track_artist = expected.create_artist() track_artist.set_name(u'ORANGE RANGE') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 4') track.set_length(32) track_artist = expected.create_artist() track_artist.set_name(u'C.C.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'\u3086\u304b\u306a') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'WORLD END') track.set_length(229) track_artist = expected.create_artist() track_artist.set_name(u'FLOW') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'\u308f\u304c\u81c8\u305f\u3057\u60aa\u306e\u83ef') track.set_length(273) track_artist = expected.create_artist() track_artist.set_name(u'ALI PROJECT') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 5') track.set_length(46) track_artist = expected.create_artist() track_artist.set_name(u'C.C.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'\u3086\u304b\u306a') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number(u'1') track.set_title(u'COLORS type1') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'FLOW') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'2') track.set_title(u'COLORS type2') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'FLOW') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'3') track.set_title(u'\u52c7\u4fa0\u9752\u6625\u8b33') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'ALI PROJECT') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'4') track.set_title(u'\u89e3\u8aad\u4e0d\u80fd type1') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'\u30b8\u30f3') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'5') track.set_title(u'\u89e3\u8aad\u4e0d\u80fd type2') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'\u30b8\u30f3') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'6') track.set_title(u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9 type1') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'SunSet Swish') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'7') track.set_title(u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9 type2') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'SunSet Swish') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'8') track.set_title(u'\u77b3\u30ce\u7ffc') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'access') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'9') track.set_title(u'O2 
\u301c\u30aa\u30fc\u30fb\u30c4\u30fc\u301c') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'ORANGE RANGE') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'10') track.set_title(u'\u30b7\u30a2\u30ef\u30bb\u30cd\u30a4\u30ed') track.set_length(96) track_artist = expected.create_artist() track_artist.set_name(u'ORANGE RANGE') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'11') track.set_title(u'WORLD END type1') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'FLOW') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'12') track.set_title(u'WORLD END type2') track.set_length(93) track_artist = expected.create_artist() track_artist.set_name(u'FLOW') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'13') track.set_title(u'\u308f\u304c\ufa1f\u305f\u3057\u60aa\u306e\u83ef type1') track.set_length(96) track_artist = expected.create_artist() track_artist.set_name(u'ALI PROJECT') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number(u'14') track.set_title(u'\u308f\u304c\ufa1f\u305f\u3057\u60aa\u306e\u83ef type2') track.set_length(96) track_artist = expected.create_artist() track_artist.set_name(u'ALI PROJECT') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/f6fe7f52-b0c8-4bd8-af06-68af909e09ca') r = s.get_result() self.assertEqual(expected, r) def test_cdr_in_sub_heading(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2013-06-27') release_event.set_country('United States') expected.append_release_event(release_event) expected.set_format('CD-R, Album') label_id = expected.create_label_id() label_id.set_label('[no label]') expected.append_label_id(label_id) expected.set_title('Thaw') artist = expected.create_artist() artist.set_name('Buckethead') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/dd3f9b98-364c-4da0-b4d7-c79f1c20f1e6') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('[untitled]') track.set_length(486) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('[untitled]') track.set_length(147) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('[untitled]') track.set_length(191) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('[untitled]') track.set_length(166) disc.append_track(track) track = disc.create_track() track.set_number('5') 
track.set_title('[untitled]') track.set_length(185) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('[untitled]') track.set_length(413) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('[untitled]') track.set_length(133) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('[untitled]') track.set_length(113) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('[untitled]') track.set_length(91) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/dd3f9b98-364c-4da0-b4d7-c79f1c20f1e6') r = s.get_result() self.assertEqual(expected, r) def test_featuring_release_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2012-03-19') release_event.set_country('United Kingdom') expected.append_release_event(release_event) expected.set_format('CD, Single') label_id = expected.create_label_id() label_id.set_label('Interscope Records') label_id.append_catalogue_nr('0602527974569') expected.append_label_id(label_id) expected.set_title("Give Me All Your Luvin'") artist = expected.create_artist() artist.set_name('Madonna') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name('Nicki Minaj') artist.set_various(False) artist.append_type(expected.ArtistTypes.FEATURING) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name('M.I.A.') artist.set_various(False) artist.append_type(expected.ArtistTypes.FEATURING) expected.append_release_artist(artist) expected.set_url('http://musicbrainz.org/release/37df7664-0924-4594-8d07-9d48fa47ced0') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title("Give Me All Your Luvin'") track.set_length(202) track_artist = expected.create_artist() track_artist.set_name('Nicki Minaj') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('M.I.A.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title("Give Me All Your Luvin' (Party Rock remix)") track.set_length(243) track_artist = expected.create_artist() track_artist.set_name('LMFAO') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Nicki Minaj') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/37df7664-0924-4594-8d07-9d48fa47ced0') r = s.get_result() self.assertEqual(expected, r) def test_404(self): expected = NotFoundResult() expected.set_scraper_name(None) s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/12345-abcdefg') r = s.get_result() self.assertEqual(expected, r) def 
test_search_scraper(self): s = musicbrainz.SearchScraper('love') r = s.get_result() self.assertTrue(len(r.get_items()) > 0) class BeatportTest(TestCase): def test_remix_track_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2006-04-19') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('Karatemusik') label_id.append_catalogue_nr('KM013') expected.append_label_id(label_id) expected.set_title('Love Spy / Love Dies') artist = expected.create_artist() artist.set_name(u'Polygamy Boys') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Electro House') expected.append_genre('Tech House') expected.set_url('http://www.beatport.com/release/love-spy-love-dies/27944') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title(u'Love Spy / Love Dies [Error Error Remix]') track.set_length(447) track_artist = expected.create_artist() track_artist.set_name('Error Error') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Love Spy / Love Dies') track.set_length(427) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Reply 23') track.set_length(418) disc.append_track(track) expected.append_disc(disc) s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/love-spy-love-dies/27944') r = s.get_result() self.assertEqual(expected, r) def test_various_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2012-01-05') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('Carlo Cavalli Music Group') label_id.append_catalogue_nr('CMG117') expected.append_label_id(label_id) expected.set_title('DJ Tunes Compilation') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Progressive House') expected.append_genre('House') expected.append_genre('Deep House') expected.append_genre('Tech House') expected.append_genre('Minimal') expected.set_url('http://www.beatport.com/release/dj-tunes-compilation/851318') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Forever Loved') track.set_length(320) track_artist = expected.create_artist() track_artist.set_name('Sam Be-Kay') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title(u'Sweep [Alex Faraci Remix]') track.set_length(398) track_artist = expected.create_artist() track_artist.set_name('Eros Locatelli') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Alex Faraci') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'Humo Y Neon [David Ahumada Remix]') track.set_length(298) track_artist = expected.create_artist() track_artist.set_name('Babette Duwez') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Joel Reichert') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('David Ahumada') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title(u'Night Melody [Massimo Russo La Guitarra Remix]') track.set_length(377) track_artist = expected.create_artist() track_artist.set_name('Alex Faraci') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Massimo Russo') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('30 m') track.set_length(393) track_artist = expected.create_artist() track_artist.set_name('Fingers Clear') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Just Begin') track.set_length(429) track_artist = expected.create_artist() track_artist.set_name('Erion Gjuzi') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Achakkar') track.set_length(388) track_artist = expected.create_artist() track_artist.set_name('Dany Cohiba') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title(u'Raveline [Italianbeat Guys Remix]') track.set_length(406) track_artist = expected.create_artist() track_artist.set_name('Massimo Russo') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Italianbeat Guys') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title(u'Grey 2 Fade feat. 
Babette Duwez [Jurgen Cecconi Mix]') track.set_length(653) track_artist = expected.create_artist() track_artist.set_name('Jurgen Cecconi') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Beethoven Tbs') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Tanzmania') track.set_length(420) track_artist = expected.create_artist() track_artist.set_name('Carlo Cavalli') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/dj-tunes-compilation/851318') r = s.get_result() self.assertEqual(expected, r) def test_release_with_empty_track_length(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2008-10-13') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('Bedrock Records') label_id.append_catalogue_nr('BEDPPF10') expected.append_label_id(label_id) expected.set_title('Bedrock 10: Past Present Future') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Progressive House') expected.append_genre('House') expected.append_genre('Breaks') expected.append_genre('Techno') expected.append_genre('Tech House') expected.set_url('http://www.beatport.com/release/bedrock-10-past-present-future/138250') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title(u'Past Present Future (Part 1) [Continuous DJ Mix]') track.set_length(4454) track_artist = expected.create_artist() track_artist.set_name('John Digweed') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title(u'Emerald [Seismic Dub]') track.set_length(501) track_artist = expected.create_artist() track_artist.set_name('Bedrock') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Seizmic') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'Imagine [Estroe Remix]') track.set_length(383) track_artist = expected.create_artist() track_artist.set_name('Fortunato & Montresor') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Estroe') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title(u'Dust [Charlie Mayhem 
Cold Shoulder Dub]') track.set_length(540) track_artist = expected.create_artist() track_artist.set_name('Pole Folder') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('CP') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Charlie May') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title(u'Chutney [Tom Mangan 2008 Remix]') track.set_length(483) track_artist = expected.create_artist() track_artist.set_name('Tom Mangan') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title(u'Rise In [Steve Lawler Powder Powder Remix]') track.set_length(712) track_artist = expected.create_artist() track_artist.set_name('Steve Lawler') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title(u'Santiago [Chab Remix]') track.set_length(522) track_artist = expected.create_artist() track_artist.set_name('Bedrock') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Chab') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title(u'Warung Beach [Jamie Stevens Remix]') track.set_length(516) track_artist = expected.create_artist() track_artist.set_name('John Digweed') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Jamie Stevens') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title(u'Emerald [Seismic Dub]') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Bedrock') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Charlie May') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title(u"Dirtbox [Bruce Aisher's Resurrection Rewind Mix]") track.set_length(464) track_artist = expected.create_artist() track_artist.set_name('Gutterstylz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Bruce Aisher') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() 
track.set_number('11') track.set_title(u'Repercussion [Danny Howells Remix]') track.set_length(594) track_artist = expected.create_artist() track_artist.set_name('Science Dept.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Danny Howells') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title(u"Beautiful Strange [John Digweed & Nick Muir's Even Stranger Remix]") track.set_length(439) track_artist = expected.create_artist() track_artist.set_name('Nick Muir') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('John Digweed') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title(u'Save Me [James Talk Remix]') track.set_length(485) track_artist = expected.create_artist() track_artist.set_name('Guy J') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('James Talk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title(u'Heaven Scent [M.O.D.E. Remix]') track.set_length(675) track_artist = expected.create_artist() track_artist.set_name('Bedrock') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('M.O.D.E.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title(u'Past Present Future (Part 2) [Continuous DJ Mix]') track.set_length(4333) track_artist = expected.create_artist() track_artist.set_name('John Digweed') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title(u'Forge [Tom Middleton Remix]') track.set_length(481) track_artist = expected.create_artist() track_artist.set_name('Bedrock') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Tom Middleton') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('17') track.set_title('Empathy') track.set_length(478) track_artist = expected.create_artist() track_artist.set_name('Jim Rivers') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('18') track.set_title(u'Pushin Too Hard [Bruce Aisher Remix]') track.set_length(460) track_artist = expected.create_artist() 
track_artist.set_name('Saints & Sinners') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Bruce Aisher') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('19') track.set_title(u'U Get So Give [Paolo Mojo Remix]') track.set_length(497) track_artist = expected.create_artist() track_artist.set_name('Moonface') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Paolo Mojo') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('20') track.set_title(u'Chilling Moments [Kazell Influx Audio Remix]') track.set_length(422) track_artist = expected.create_artist() track_artist.set_name('Shmuel Flash') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Kazell') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('21') track.set_title(u'K10 [Alan Fitzpatrick Remix]') track.set_length(512) track_artist = expected.create_artist() track_artist.set_name('Misstress Barbara') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Alan Fitzpatrick') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('22') track.set_title(u'Chutney [Size 9 Reinterpretation]') track.set_length(506) track_artist = expected.create_artist() track_artist.set_name('Tom Mangan') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('WiNK') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('23') track.set_title('Stoppage Time') track.set_length(636) track_artist = expected.create_artist() track_artist.set_name('Guy Gerber') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('24') track.set_title('Six Hours Later') track.set_length(459) track_artist = expected.create_artist() track_artist.set_name('Bradler') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Dualton') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('25') track.set_title(u'All I Know feat. 
Astrid Suryanto [Jim Rivers Space Disco Remix]') track.set_length(476) track_artist = expected.create_artist() track_artist.set_name('Morgan Page') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Gregory Shiff') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Jim Rivers') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('26') track.set_title('Gravitation') track.set_length(323) track_artist = expected.create_artist() track_artist.set_name('Nick Muir') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('27') track.set_title(u'Rise In [Steve Lawler Powder Powder Remix (Edit)]') track.set_length(298) track_artist = expected.create_artist() track_artist.set_name('Steve Lawler') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('28') track.set_title(u"Santiago [Guy Gerber's Hotrod Dub]") track.set_length(512) track_artist = expected.create_artist() track_artist.set_name('Bedrock') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Guy Gerber') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('29') track.set_title('Sensei') track.set_length(441) track_artist = expected.create_artist() track_artist.set_name("Funk D'Void") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Toby Izui') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('30') track.set_title('Madhouse') track.set_length(649) track_artist = expected.create_artist() track_artist.set_name('Pindrop') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('31') track.set_title(u'Lunar [Late Night Friday Remix]') track.set_length(349) track_artist = expected.create_artist() track_artist.set_name('Guy J') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/bedrock-10-past-present-future/138250') r = s.get_result() self.assertEqual(expected, r) def test_release_with_various_artists_in_track_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2013-07-10') release_event.set_country(None) expected.append_release_event(release_event) 
expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('040 Recordings') label_id.append_catalogue_nr('040REC012C') expected.append_label_id(label_id) expected.set_title('040 Recordings Ibiza 2013 Vol. 1') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Minimal') expected.append_genre('Tech House') expected.set_url('http://www.beatport.com/release/040-recordings-ibiza-2013-vol-1/1113652') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Japanese Floor') track.set_length(422) track_artist = expected.create_artist() track_artist.set_name('Cudder & Mulder') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Persian') track.set_length(379) track_artist = expected.create_artist() track_artist.set_name('Carlo Ruetz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Me And My Violin') track.set_length(483) track_artist = expected.create_artist() track_artist.set_name('Andree Wischnewski') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Aurra Che') track.set_length(432) track_artist = expected.create_artist() track_artist.set_name('Dompe') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Cuture Club') track.set_length(432) track_artist = expected.create_artist() track_artist.set_name('Debirski') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Yaman!') track.set_length(435) track_artist = expected.create_artist() track_artist.set_name('Robbe Rabone') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Flint Westwood') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title(u'Jazzy Groovie [Thomas Lizzara Remix]') track.set_length(409) track_artist = expected.create_artist() track_artist.set_name('Kanzler & Wischnewski') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Thomas Lizzara') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Alright') track.set_length(416) track_artist = expected.create_artist() track_artist.set_name('Dompe') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title(u'The Sun Ft. Jan Hilde [Heinrich & Heine Remix]') track.set_length(372) track_artist = expected.create_artist() track_artist.set_name('NECRO & Reichmann') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Heinrich & Heine') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.REMIXER) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Libre') track.set_length(376) track_artist = expected.create_artist() track_artist.set_name('Neal Porter') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Closer') track.set_length(430) track_artist = expected.create_artist() track_artist.set_name('Heinrich & Heine') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Society Today') track.set_length(343) track_artist = expected.create_artist() track_artist.set_name('Maurice Deek') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Music For The People') track.set_length(428) track_artist = expected.create_artist() track_artist.set_name('Heinrich & Heine') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Dont Touch My Phone') track.set_length(472) track_artist = expected.create_artist() track_artist.set_name('Eric Kanzler') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Glare') track.set_length(399) track_artist = expected.create_artist() track_artist.set_name('Heinrich & Heine') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title(u'040 Recordings Ibiza 2013 Vol. 
1 [Heinrich & Heine In The Mix]') track.set_length(4440) disc.append_track(track) expected.append_disc(disc) s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/040-recordings-ibiza-2013-vol-1/1113652') r = s.get_result() self.assertEqual(expected, r) def test_404(self): expected = NotFoundResult() expected.set_scraper_name(None) s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/blubb/123') r = s.get_result() self.assertEqual(expected, r) def test_search_scraper(self): s = beatport.SearchScraper('love') r = s.get_result() self.assertTrue(len(r.get_items()) > 0) class MetalarchivesTest(TestCase): def test_simple_album(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('June 24th, 2002') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format('Full-length, CD') label_id = expected.create_label_id() label_id.set_label('Spinefarm Records') label_id.append_catalogue_nr('spi149CD / 018459-2') expected.append_label_id(label_id) expected.set_title('Century Child') artist = expected.create_artist() artist.set_name('Nightwish') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://www.metal-archives.com/albums/Nightwish/Century_Child/3719') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Bless the Child') track.set_length(372) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('End of All Hope') track.set_length(235) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Dead to the World') track.set_length(260) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Ever Dream') track.set_length(284) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Slaying the Dreamer') track.set_length(272) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Forever Yours') track.set_length(230) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Ocean Soul') track.set_length(255) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Feel for You') track.set_length(235) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('The Phantom of the Opera') track.set_length(250) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Beauty of the Beast') track.set_length(622) disc.append_track(track) expected.append_disc(disc) s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Nightwish/Century_Child/3719') r = s.get_result() self.assertEqual(expected, r) def test_multiple_cds(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('September 22nd, 2008') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(u'Live album, 2CD') label_id = expected.create_label_id() label_id.set_label('GUN Records') label_id.append_catalogue_nr('88697 342672') expected.append_label_id(label_id) expected.set_title('Black Symphony') artist = expected.create_artist() artist.set_name('Within Temptation') 
artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/212779') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Ouverture') track.set_length(463) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title("Jillian (I'd Give My Heart)") track.set_length(279) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('The Howling') track.set_length(391) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Stand My Ground') track.set_length(273) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('The Cross') track.set_length(322) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('What Have You Done?') track.set_length(298) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Hand of Sorrow') track.set_length(340) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('The Heart of Everything') track.set_length(348) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Forgiven') track.set_length(293) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Somewhere') track.set_length(264) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('The Swan Song') track.set_length(240) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Memories') track.set_length(243) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Our Solemn Hour') track.set_length(322) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('The Other Half (of Me)') track.set_length(304) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Frozen') track.set_length(360) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('The Promise') track.set_length(272) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Angels') track.set_length(495) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Mother Earth') track.set_length(242) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('The Truth Beneath the Rose') track.set_length(443) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Deceiver of Fools') track.set_length(458) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('All I Need') track.set_length(295) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Ice Queen') track.set_length(435) disc.append_track(track) expected.append_disc(disc) s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/212779') r = s.get_result() self.assertEqual(expected, r) def test_multiple_release_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) 
release_event = expected.create_release_event() release_event.set_date('April 14th, 2007') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format('Split, 7" vinyl (45 RPM)') label_id = expected.create_label_id() label_id.set_label('New Iron Age Records') label_id.append_catalogue_nr('NIA 002') expected.append_label_id(label_id) expected.set_title('Clash of Iron Vol. I - Live at Keep It True') artist = expected.create_artist() artist.set_name('Manilla Road') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name('Brocas Helm') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://www.metal-archives.com/albums/Manilla_Road/Clash_of_Iron_Vol._I_-_Live_at_Keep_It_True/147439') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Manilla Road - Death by the Hammer') track.set_length(None) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Brocas Helm - Ravenwreck') track.set_length(None) disc.append_track(track) expected.append_disc(disc) s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Manilla_Road/Clash_of_Iron_Vol._I_-_Live_at_Keep_It_True/147439') r = s.get_result() self.assertEqual(expected, r) def test_404(self): expected = NotFoundResult() expected.set_scraper_name(None) s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/999999999') r = s.get_result() self.assertEqual(expected, r) def test_search_scraper(self): s = metalarchives.SearchScraper('love') r = s.get_result() self.assertTrue(len(r.get_items()) > 0) class JunodownloadTest(TestCase): def test_simple_album(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('3 July, 2011') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('3 Beat') label_id.append_catalogue_nr('3BEAT 051') expected.append_label_id(label_id) expected.set_title('Love') artist = expected.create_artist() artist.set_name('Inna') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Funky') expected.append_genre('Club House') expected.set_url('http://www.junodownload.com/products/love/1774811-02/') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Love (UK radio edit)') track.set_length(151) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Love (club mix)') track.set_length(299) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Love (eSquire radio edit)') track.set_length(233) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Love (eSquire mix)') track.set_length(357) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Love (7th Heaven radio edit)') track.set_length(230) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Love (7th Heaven mix)') track.set_length(394) 
disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Love (Dandeej mix)') track.set_length(315) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Love (DJ Andi mix)') track.set_length(341) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Love (Klubfiller mix)') track.set_length(395) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Love (Klubfiller dub mix)') track.set_length(389) disc.append_track(track) expected.append_disc(disc) s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/love/1774811-02/') r = s.get_result() self.assertEqual(expected, r) def test_featuring_main_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('12 November, 2010') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('Staff Productions') label_id.append_catalogue_nr('SFP 012') expected.append_label_id(label_id) expected.set_title('Love') artist = expected.create_artist() artist.set_name('Mustafa') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name('Tasita D mour') artist.set_various(False) artist.append_type(expected.ArtistTypes.FEATURING) expected.append_release_artist(artist) expected.append_genre('Broken Beat') expected.append_genre('Nu Jazz') expected.set_url('http://www.junodownload.com/products/love/1662955-02/') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Love (original Miami mix)') track.set_length(301) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title("Love (Mustafa's Deep Piano mix)") track.set_length(308) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Love (D-Malice Afro-edit vocal)') track.set_length(381) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Love (RY meets Mustafa vocal mix)') track.set_length(365) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Love (Ospina & Oscar P remix)') track.set_length(365) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Love (Ospina & Oscar P Drum dub)') track.set_length(365) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Love (Steven Stone remix)') track.set_length(389) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Love (David Mateo & Rafix club mix)') track.set_length(297) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Love (Rafael Yapudjian Meets RyB remix)') track.set_length(449) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Love (acoustic mix)') track.set_length(232) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Love (D-Malice Afro edit instrumental)') track.set_length(381) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Love (Ospina & Oscar P intru-mental)') track.set_length(365) 
disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Love (Steven Stone instrumental remix)') track.set_length(388) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Love (David Mateo & Rafix radio club mix instrumental)') track.set_length(297) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Love (Rafael Yapudjian Meets RyB dub remix)') track.set_length(449) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('Love (RY Meets Mustafa instrumental mix)') track.set_length(365) disc.append_track(track) expected.append_disc(disc) s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/love/1662955-02/') r = s.get_result() self.assertEqual(expected, r) def test_mixed_various_main_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('25 July, 2011') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('Vacation Australia') label_id.append_catalogue_nr('VRCD 003') expected.append_label_id(label_id) expected.set_title('Bass Kleph Presents') artist = expected.create_artist() artist.set_name('Bass Kleph') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Funky') expected.append_genre('Club House') expected.set_url('http://www.junodownload.com/products/bass-kleph-bass-kleph-presents/1789514-02/') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Bass Kleph & Filthy Rich - These Mornings') track.set_length(368) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Bass Kleph & Alex Kenji - Melocoton') track.set_length(370) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Bass Kleph - Hey Ya') track.set_length(380) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Bass Kleph & Chris Arnott & BKCA - We Feel Love') track.set_length(360) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Bass Kleph - Oh Yeah') track.set_length(403) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Stella MC & Bass Kleph - $pend My Money (original club mix)') track.set_length(490) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title("Bass Kleph - I'll Be OK") track.set_length(434) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Danny T & Oh Snap!! 
- Whine Ya Waistline (Bass Kleph remix)') track.set_length(404) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Joan Reyes - Shakedown (Bass Kleph remix)') track.set_length(438) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Bass Kleph & Prok & Fitch - Disco Ate My Baby') track.set_length(362) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Moguai & Westbam - Original Hardcore (Bass Kleph remix)') track.set_length(420) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Jesse Vorn - Somewhere (Bass Kleph remix)') track.set_length(376) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Bass Kleph & Chris Arnott & Kid Massive - All Right (Bass Kleph remix)') track.set_length(456) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Bass Kleph & Joan Reyes - Salida (original club mix)') track.set_length(427) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Bass Kleph & D Ramirez - Pulse') track.set_length(396) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('Various - Bass Kleph Presents (continuous DJ mix by Bass Kleph)') track.set_length(4439) disc.append_track(track) expected.append_disc(disc) s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/bass-kleph-bass-kleph-presents/1789514-02/') r = s.get_result() self.assertEqual(expected, r) def test_various_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('30 December, 2008') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) label_id = expected.create_label_id() label_id.set_label('NuZone Tools') label_id.append_catalogue_nr('NZT 015') expected.append_label_id(label_id) expected.set_title('2008 MOST USEFUL TOOLS') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Progressive House') expected.set_url('http://www.junodownload.com/products/2008-most-useful-tools/1384246-02/') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Sygma - Nightlights') track.set_length(522) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title("Adolfo Morrone - I'm Nervhouse") track.set_length(455) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Jonathan Carey - The Science Of Music') track.set_length(354) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Lorenzo Venturini - New Era') track.set_length(415) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('E-Mark - Anthem For Deejays Part 2') track.set_length(420) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Alex Spadoni - Sunset') track.set_length(451) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Jordan Baxxter feat Aedo - What It Feels Like For A Girl?') track.set_length(470) disc.append_track(track) track = disc.create_track() 
track.set_number('8') track.set_title('Hildebrand - Raindrops') track.set_length(519) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Dario Maffia - Phaelon') track.set_length(545) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Emerald Coast - Exhausted') track.set_length(338) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Sygma - Children') track.set_length(539) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('GoldSaint - Tonight') track.set_length(405) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Peter Santos - Back To You') track.set_length(454) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Oscar Burnside - Dark Side') track.set_length(334) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('GoldSaint - Recharge') track.set_length(510) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('Luca Lux - Wildest Dream') track.set_length(428) disc.append_track(track) track = disc.create_track() track.set_number('17') track.set_title('SimoX DJ - Star') track.set_length(317) disc.append_track(track) track = disc.create_track() track.set_number('18') track.set_title('Greek S - The Sound (09 mix)') track.set_length(517) disc.append_track(track) track = disc.create_track() track.set_number('19') track.set_title('Various - Mixed Tools 2008 (Part 1 - mixed by Sygma)') track.set_length(2494) disc.append_track(track) track = disc.create_track() track.set_number('20') track.set_title('Various - Mixed Tools 2008 (Part 2 - mixed by Peter Santos)') track.set_length(2334) disc.append_track(track) expected.append_disc(disc) s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/2008-most-useful-tools/1384246-02/') r = s.get_result() self.assertEqual(expected, r) def test_404(self): expected = NotFoundResult() expected.set_scraper_name(None) s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/2008-most-useful-tools/99999999/') r = s.get_result() self.assertEqual(expected, r) def test_search_scraper(self): s = junodownload.SearchScraper('love') r = s.get_result() self.assertTrue(len(r.get_items()) > 0) class ITunesTest(TestCase): def test_simple_album(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('1985') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) expected.set_title('Love (Remastered)') artist = expected.create_artist() artist.set_name('The Cult') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Rock') expected.append_genre('Adult Alternative') expected.append_genre('Hard Rock') expected.append_genre('Alternative') expected.append_genre('Goth Rock') expected.append_genre('College Rock') expected.set_url('http://itunes.apple.com/us/album/love-remastered/id3022929?ign-mpt=uo%3D4') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Nirvana') track.set_length(326) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Big Neon Glitter') track.set_length(291) 
disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Love') track.set_length(329) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Brother Wolf, Sister Moon') track.set_length(407) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Rain') track.set_length(236) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Phoenix') track.set_length(306) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Hollow Man') track.set_length(285) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Revolution') track.set_length(326) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('She Sells Sanctuary') track.set_length(263) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Black Angel') track.set_length(322) disc.append_track(track) expected.append_disc(disc) s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/us/album/love-remastered/id3022929?ign-mpt=uo%3D4') r = s.get_result() self.assertEqual(expected, r) def test_multiple_cds(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('Aug 15, 2007') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) expected.set_title('Dark Passion Play (Double Disc Version)') artist = expected.create_artist() artist.set_name('Nightwish') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Rock') expected.append_genre('Metal') expected.append_genre('Alternative') expected.append_genre('Goth Rock') expected.append_genre('Death Metal/Black Metal') expected.set_url('https://itunes.apple.com/us/album/dark-passion-play-double-disc/id264697038?uo=4') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('The Poet and the Pendulum') track.set_length(834) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Bye Bye Beautiful') track.set_length(254) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Amaranth') track.set_length(231) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Cadence of Her Last Breath') track.set_length(254) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Master Passion Greed') track.set_length(362) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Eva') track.set_length(265) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Sahara') track.set_length(347) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Whoever Brings the Night') track.set_length(257) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('For the Heart I Once Had') track.set_length(235) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('The Islander') track.set_length(305) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Last of the Wilds') 
track.set_length(340) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('7 Days to the Wolves') track.set_length(423) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Meadows of Heaven') track.set_length(430) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Reach (Amaranth Demo Version) [Bonus Track]') track.set_length(232) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('The Poet and the Pendulum (Instrumental)') track.set_length(834) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Bye Bye Beautiful (Instrumental)') track.set_length(254) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Amaranth (Instrumental)') track.set_length(231) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Cadence of Her Last Breath (Instrumental)') track.set_length(254) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Master Passion Greed (Instrumental)') track.set_length(362) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Eva (Instrumental)') track.set_length(265) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Sahara (Instrumental)') track.set_length(347) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Whoever Brings the Night (Instrumental)') track.set_length(257) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('For the Heart I Once Had (Instrumental)') track.set_length(236) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('The Islander (Instrumental)') track.set_length(305) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Last of the Wilds (Instrumental)') track.set_length(340) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('7 Days to the Wolves (Instrumental)') track.set_length(424) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Meadows of Heaven (Instrumental)') track.set_length(429) disc.append_track(track) expected.append_disc(disc) s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/dark-passion-play-double-disc/id264697038?uo=4') r = s.get_result() self.assertEqual(expected, r) def test_various_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('Oct 28, 2008') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) expected.set_title('Twilight (Original Motion Picture Soundtrack)') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Soundtrack') expected.set_url('https://itunes.apple.com/us/album/twilight-original-motion-picture/id294342468?ign-mpt=uo%3D4') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Supermassive Black Hole') 
track.set_length(209) track_artist = expected.create_artist() track_artist.set_name('Muse') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Decode') track.set_length(261) track_artist = expected.create_artist() track_artist.set_name('Paramore') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Full Moon') track.set_length(230) track_artist = expected.create_artist() track_artist.set_name('The Black Ghosts') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Leave Out All the Rest') track.set_length(199) track_artist = expected.create_artist() track_artist.set_name('LINKIN PARK') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Spotlight (Twilight Mix)') track.set_length(200) track_artist = expected.create_artist() track_artist.set_name('MuteMath') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Go All the Way (Into the Twilight)') track.set_length(207) track_artist = expected.create_artist() track_artist.set_name('Perry Farrell') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Tremble for My Beloved') track.set_length(233) track_artist = expected.create_artist() track_artist.set_name('Collective Soul') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('I Caught Myself') track.set_length(235) track_artist = expected.create_artist() track_artist.set_name('Paramore') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Eyes On Fire') track.set_length(301) track_artist = expected.create_artist() track_artist.set_name('Blue Foundation') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Never Think') track.set_length(269) track_artist = expected.create_artist() track_artist.set_name('Rob Pattinson') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Flightless Bird, American Mouth') track.set_length(240) track_artist = expected.create_artist() track_artist.set_name('Iron & Wine') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) 
track = disc.create_track() track.set_number('12') track.set_title("Bella's Lullaby") track.set_length(138) track_artist = expected.create_artist() track_artist.set_name('Carter Burwell') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Let Me Sign (Bonus Track)') track.set_length(138) track_artist = expected.create_artist() track_artist.set_name('Rob Pattinson') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('La Traviata (Bonus Track)') track.set_length(185) track_artist = expected.create_artist() track_artist.set_name('Royal Philharmonic Orchestra') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Clair de Lune (Bonus Track)') track.set_length(358) track_artist = expected.create_artist() track_artist.set_name('The APM Orchestra') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/twilight-original-motion-picture/id294342468?ign-mpt=uo%3D4') r = s.get_result() self.assertEqual(expected, r) def test_non_us_store(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('1985') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) expected.set_title('Love (Remastered)') artist = expected.create_artist() artist.set_name('The Cult') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Rock') expected.append_genre('Musique') expected.append_genre('Alternative adulte') expected.append_genre('Hard rock') expected.append_genre('Alternative') expected.append_genre('Rock gothique') expected.append_genre('College rock') expected.set_url('http://itunes.apple.com/fr/album/love-remastered/id3022929?ign-mpt=uo%3D4') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Nirvana') track.set_length(326) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Big Neon Glitter') track.set_length(291) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Love') track.set_length(329) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Brother Wolf, Sister Moon') track.set_length(407) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Rain') track.set_length(236) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Phoenix') track.set_length(306) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Hollow Man') track.set_length(285) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Revolution') track.set_length(326) disc.append_track(track) track = disc.create_track() track.set_number('9') 
track.set_title('She Sells Sanctuary') track.set_length(263) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Black Angel') track.set_length(322) disc.append_track(track) expected.append_disc(disc) s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/fr/album/love-remastered/id3022929?ign-mpt=uo%3D4') r = s.get_result() self.assertEqual(expected, r) def test_tracknum_in_name_column(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('Jun 01, 2005') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format(None) expected.set_title('Chopin: Piano Works') artist = expected.create_artist() artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) <|fim▁hole|> expected.append_genre('Classical') expected.set_url('https://itunes.apple.com/us/album/chopin-piano-works/id77261376') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('12 Etudes, Op. 10: No. 1. in C') track.set_length(136) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('12 Etudes, Op.10: No. 2. in A Minor "chromatique"') track.set_length(84) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('12 Etudes, Op.10: No. 3. in E "Tristesse"') track.set_length(243) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('12 Etudes, Op.10: No. 4. in C-Sharp Minor') track.set_length(128) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('12 Etudes, Op.10: No. 5. in G-Flat "Black Keys"') track.set_length(102) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('12 Etudes, Op.10: No. 6. in E-Flat Minor') track.set_length(195) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('12 Etudes, Op.10: No. 7. 
in C') track.set_length(92) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('12 Etudes, Op.10: No. 8. in F') track.set_length(163) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('12 Etudes, Op.10: No. 9. in F Minor') track.set_length(137) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('12 Etudes, Op.10: No. 10. in A-Flat') track.set_length(140) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('12 Etudes, Op.10: No. 11. in E-Flat') track.set_length(135) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('12 Etudes, Op.10: No. 12. in C Minor "Revolutionary"') track.set_length(173) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('12 Etudes, Op. 25: No. 1 in A-Flat - "Harp Study"') track.set_length(171) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('12 Etudes, Op.25: No. 2 in F Minor') track.set_length(92) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('12 Etudes, Op.25: No. 3 in F Major') track.set_length(103) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('12 Etudes, Op.25: No. 
4 in A Minor') track.set_length(88) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('17') track.set_title('12 Etudes, Op.25: No. 5 in E Minor') track.set_length(198) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('18') track.set_title('12 Etudes, Op.25: No. 6 in G-Sharp Minor') track.set_length(117) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('19') track.set_title('12 Etudes, Op.25: No. 7 in C-Sharp Minor') track.set_length(312) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('20') track.set_title('12 Etudes, Op.25: No. 8 in D-Flat') track.set_length(66) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('21') track.set_title('12 Etudes, Op.25: No. 9 in G-Flat, "Butterfly Wings"') track.set_length(62) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('22') track.set_title('12 Etudes, Op.25: No. 10 in B Minor') track.set_length(244) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('23') track.set_title('12 Etudes, Op.25: No. 11 in A Minor "Winter Wind"') track.set_length(215) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('24') track.set_title('12 Etudes, Op. 25: No. 12 in C Minor') track.set_length(168) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('25') track.set_title('Impromptu No. 
1 in A-Flat, Op.29') track.set_length(233) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('26') track.set_title('Impromptu No. 2 in F-Sharp, Op.36') track.set_length(351) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('27') track.set_title('Impromptu No. 3 in G-Flat, Op.51') track.set_length(284) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('28') track.set_title('Impromptu No. 4 in C-Sharp Minor, Op. 66 "Fantaisie-Impromptu"') track.set_length(291) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Piano Sonata No. 2 in B-Flat Minor, Op. 35: I. Grave - Doppio Movimento') track.set_length(331) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title(u'Piano Sonata No. 2 in B-Flat Minor, Op. 35: II. Scherzo - Pi\xf9 Lento - Tempo I') track.set_length(397) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'Piano Sonata No. 2 in B-Flat Minor, Op. 35: III. Marche Fun\xe8bre (Lento)') track.set_length(503) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Piano Sonata No. 2 in B-Flat Minor, Op. 35: IV. Finale (Presto)') track.set_length(97) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: I. Allegro Maestoso') track.set_length(533) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: II. 
Scherzo (Molto Vivace)') track.set_length(170) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: III. Largo') track.set_length(561) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: IV. Finale (Presto Non Tanto)') track.set_length(309) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Mazurka No. 54 in D: Allegro Non Troppo') track.set_length(71) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Mazurka No. 46 in C Op.67 No.3: Allegretto') track.set_length(88) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Mazurka No. 49 in A Minor Op. 68, No. 2: Lento') track.set_length(155) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Mazurka No. 5 in B-Flat Op. 7, No. 1: Vivace') track.set_length(140) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Introduction and Variations On a German National Air Op.posth. (KK 925-927)') track.set_length(387) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Mazurka No. 58 in A-Flat: Poco Mosso') track.set_length(77) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Berceuse in D-Flat, Op. 
57: Andante') track.set_length(316) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('Polonaise No. 6 in A-Flat, Op. 53 -"Heroic": Maestoso') track.set_length(413) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(3) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Piano Concerto No. 1 in E Minor, Op. 11: I. Allegro Maestoso') track.set_length(1215) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Jerzy Semkow') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Berlin Philharmonic') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Piano Concerto No. 1 in E Minor, Op. 11: II. Romance (Larghetto)') track.set_length(636) track_artist = expected.create_artist() track_artist.set_name(u'Jerzy Semkow') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Berlin Philharmonic') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Piano Concerto No. 1 in E Minor, Op. 11: III. Rondo (Vivace)') track.set_length(619) track_artist = expected.create_artist() track_artist.set_name(u'Jerzy Semkow') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Berlin Philharmonic') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Piano Concerto No. 2 in F Minor, Op. 21: I. 
Maestoso') track.set_length(901) track_artist = expected.create_artist() track_artist.set_name(u'Janos Kulka') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Berlin Philharmonic') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Piano Concerto No. 2 in F Minor, Op. 21: II. Larghetto') track.set_length(583) track_artist = expected.create_artist() track_artist.set_name(u'Janos Kulka') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Berlin Philharmonic') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Piano Concerto No. 2 in F Minor, Op. 21: III. Allegro Vivace') track.set_length(524) track_artist = expected.create_artist() track_artist.set_name(u'Janos Kulka') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Berlin Philharmonic') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/chopin-piano-works/id77261376') r = s.get_result() self.assertEqual(expected, r) def test_404(self): expected = NotFoundResult() expected.set_scraper_name(None) s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/us/album/blubb/id999999999999') r = s.get_result() self.assertEqual(expected, r) def test_non_us_404(self): expected = NotFoundResult() expected.set_scraper_name(None) s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/fr/album/blubb/id999999999999') r = s.get_result() self.assertEqual(expected, r) def test_search_scraper(self): s = itunes.SearchScraper('love') r = s.get_result() self.assertTrue(len(r.get_items()) > 0) class BandcampTest(TestCase): def test_album_with_band_name(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2012') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format('WEB release') expected.set_title(u'Love Sex Machine') artist = expected.create_artist() artist.set_name(u'Love Sex Machine') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) 
expected.set_url('http://music.throatruinerrecords.com/album/love-sex-machine') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title(u'Anal On Deceased Virgin') track.set_length(335) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title(u'Deafening Peepshow') track.set_length(270) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'Fucking Battle') track.set_length(157) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title(u'Antagonism Can STFU') track.set_length(179) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title(u'Plenty Of Feelings') track.set_length(147) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title(u'Vagina Curse') track.set_length(320) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title(u'Killed With A Monster Cock') track.set_length(284) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title(u'Warstrike Takes The Piss') track.set_length(275) disc.append_track(track) expected.append_disc(disc) s = bandcamp.ReleaseScraper.from_string('http://music.throatruinerrecords.com/album/love-sex-machine') r = s.get_result() self.assertEqual(expected, r) # there was a test with this name, but I don't know what it did... # def test_album_without_band_name(self): # pass def test_album_with_various_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2013') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format('WEB release') expected.set_title(u'Indietracks Compilation 2013') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://indietracks.bandcamp.com/album/indietracks-compilation-2013') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title(u'If You Still Want Him') track.set_length(250) track_artist = expected.create_artist() track_artist.set_name(u'The French Defence') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title(u'Is Anybody Out There?') track.set_length(246) track_artist = expected.create_artist() track_artist.set_name(u'The Ballet') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'Rulers And The States') track.set_length(165) track_artist = expected.create_artist() track_artist.set_name(u'bis') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title(u'Temporary Tattoo') track.set_length(171) track_artist = expected.create_artist() track_artist.set_name(u'Lardpony') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = 
disc.create_track() track.set_number('5') track.set_title(u'Always Want Us To') track.set_length(192) track_artist = expected.create_artist() track_artist.set_name(u'EXPENSIVE') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title(u'Stockport') track.set_length(328) track_artist = expected.create_artist() track_artist.set_name(u'The Wake') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title(u'Secret') track.set_length(132) track_artist = expected.create_artist() track_artist.set_name(u'Frozy') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title(u'Jackie') track.set_length(218) track_artist = expected.create_artist() track_artist.set_name(u'The Understudies') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title(u'Ticket Machine') track.set_length(184) track_artist = expected.create_artist() track_artist.set_name(u'Making Marks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title(u'Echoing Days') track.set_length(204) track_artist = expected.create_artist() track_artist.set_name(u'Monnone Alone') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title(u'Swanwick Junction') track.set_length(172) track_artist = expected.create_artist() track_artist.set_name(u'Northern Spies') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title(u'Terrible Things') track.set_length(141) track_artist = expected.create_artist() track_artist.set_name(u'Owl & Mouse') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title(u"She'll Come Back for Indian Summer") track.set_length(218) track_artist = expected.create_artist() track_artist.set_name(u'Alpaca Sports') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title(u'Glockelbar') track.set_length(137) track_artist = expected.create_artist() track_artist.set_name(u'Haiku Salut') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title(u'Astronaut') track.set_length(190) track_artist = expected.create_artist() track_artist.set_name(u'Woog Riots') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) 
track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title(u'Tut Tut Tut') track.set_length(150) track_artist = expected.create_artist() track_artist.set_name(u'The Tuts') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('17') track.set_title(u'Mosaic') track.set_length(161) track_artist = expected.create_artist() track_artist.set_name(u'Fear Of Men') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('18') track.set_title(u'Only You') track.set_length(194) track_artist = expected.create_artist() track_artist.set_name(u'Big Wave') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('19') track.set_title(u'The Things That You Said') track.set_length(200) track_artist = expected.create_artist() track_artist.set_name(u'The Fireworks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('20') track.set_title(u'Glue') track.set_length(276) track_artist = expected.create_artist() track_artist.set_name(u'Fever Dream') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('21') track.set_title(u'Slackjawed') track.set_length(175) track_artist = expected.create_artist() track_artist.set_name(u'Tunabunny') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('22') track.set_title(u'Lie') track.set_length(224) track_artist = expected.create_artist() track_artist.set_name(u'Cars Can Be Blue') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('23') track.set_title(u'Br\xe4nn\xf6') track.set_length(223) track_artist = expected.create_artist() track_artist.set_name(u'Finnmark!') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('24') track.set_title(u'Sorry') track.set_length(166) track_artist = expected.create_artist() track_artist.set_name(u'The Art Club') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('25') track.set_title(u'Food') track.set_length(181) track_artist = expected.create_artist() track_artist.set_name(u'The Lovely Eggs') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('26') track.set_title(u'Clean Up Yr Own Shit, Pal') track.set_length(132) track_artist = expected.create_artist() track_artist.set_name(u'Good Grief') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('27') track.set_title(u'Sycamore') track.set_length(162) track_artist = expected.create_artist() track_artist.set_name(u'Martha') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('28') track.set_title(u'Disappear') track.set_length(147) track_artist = expected.create_artist() track_artist.set_name(u'Bloomer') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('29') track.set_title(u'You Held My Hand') track.set_length(158) track_artist = expected.create_artist() track_artist.set_name(u'Flowers') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('30') track.set_title(u'J.K.') track.set_length(139) track_artist = expected.create_artist() track_artist.set_name(u'David Leach') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('31') track.set_title(u'Always Thought') track.set_length(294) track_artist = expected.create_artist() track_artist.set_name(u'Jupiter In Jars') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('32') track.set_title(u'My Old Friend') track.set_length(164) track_artist = expected.create_artist() track_artist.set_name(u"Enderby's Room") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('33') track.set_title(u'I Got The Answer') track.set_length(172) track_artist = expected.create_artist() track_artist.set_name(u'The Magic Theatre') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('34') track.set_title(u'I Love You') track.set_length(178) track_artist = expected.create_artist() track_artist.set_name(u'The Wave Pictures') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('35') track.set_title(u'Pilot Light') track.set_length(234) track_artist = expected.create_artist() track_artist.set_name(u'Pete Green') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('36') track.set_title(u"Let's Go Surfing") track.set_length(181) track_artist = expected.create_artist() track_artist.set_name(u'Helen Love') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('37') track.set_title(u'Summer, You And Me') track.set_length(180) track_artist = expected.create_artist() 
track_artist.set_name(u'When Nalda Became Punk') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('38') track.set_title(u'Secret Wish') track.set_length(89) track_artist = expected.create_artist() track_artist.set_name(u'The McTells') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('39') track.set_title(u'Better Than Love') track.set_length(163) track_artist = expected.create_artist() track_artist.set_name(u'Pale Spectres') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('40') track.set_title(u'Without You') track.set_length(147) track_artist = expected.create_artist() track_artist.set_name(u'Milky Wimpshake') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('41') track.set_title(u"Let's Stay Undecided") track.set_length(181) track_artist = expected.create_artist() track_artist.set_name(u'The Soulboy Collective mit Antenne Danger') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('42') track.set_title(u'Age Of Victoria') track.set_length(261) track_artist = expected.create_artist() track_artist.set_name(u'The Secret History') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('43') track.set_title(u'Eating Me, Eating You') track.set_length(202) track_artist = expected.create_artist() track_artist.set_name(u'The Beautiful Word') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('44') track.set_title(u'Scared And Worried') track.set_length(142) track_artist = expected.create_artist() track_artist.set_name(u'Without Feathers') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('45') track.set_title(u'Save Me') track.set_length(155) track_artist = expected.create_artist() track_artist.set_name(u'The Choo Choo Trains') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('46') track.set_title(u'Evil/Shy (Acoustic Version)') track.set_length(187) track_artist = expected.create_artist() track_artist.set_name(u'The Mini Skips') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('47') track.set_title(u'Slow Trains') track.set_length(201) track_artist = expected.create_artist() track_artist.set_name(u'anaesthetics') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) 
disc.append_track(track) expected.append_disc(disc) s = bandcamp.ReleaseScraper.from_string('http://indietracks.bandcamp.com/album/indietracks-compilation-2013') r = s.get_result() self.assertEqual(expected, r) def test_album_with_track_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2012') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format('Free WEB release') expected.set_title(u'Love Everyday EP') artist = expected.create_artist() artist.set_name(u'Dumbfoundead') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://music.dumbfoundead.com/album/love-everyday-ep') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title(u'For You') track.set_length(91) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title(u'Love Everyday') track.set_length(211) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'Stole the Show') track.set_length(177) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title(u'Love is a Song') track.set_length(292) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title(u'Body High ft. Breezy Lovejoy & Jose Rios') track.set_length(267) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title(u'Not Right Now ft. Wax') track.set_length(173) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title(u'Breezy Lovejoy - Paradise') track.set_length(202) track_artist = expected.create_artist() track_artist.set_name(u'Breezy Lovejoy') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = bandcamp.ReleaseScraper.from_string('http://music.dumbfoundead.com/album/love-everyday-ep') r = s.get_result() self.assertEqual(expected, r) def test_album_with_utf8_characters(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(u'2012') release_event.set_country(None) expected.append_release_event(release_event) expected.set_format('WEB release') expected.set_title(u'Illusions') artist = expected.create_artist() artist.set_name(u'Approaching Nirvana') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.set_url('http://music.approachingnirvana.com/album/illusions') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title(u'Sugar High') track.set_length(162) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title(u'Illusion (feat. 
Brenton Mattheus)') track.set_length(267) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title(u'Beer Remastered') track.set_length(281) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title(u'Snowfall') track.set_length(270) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title(u'Love Theory') track.set_length(157) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title(u'Canc\xfan') track.set_length(257) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title(u'South Side') track.set_length(268) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title(u'Illusion (Instrumental)') track.set_length(267) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title(u'Love Theory (Instrumental)') track.set_length(157) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title(u'Illusion (Extended Mix) [feat. Brenton Mattheus]') track.set_length(372) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title(u'Beer Remastered (Extended Mix)') track.set_length(420) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title(u'Snowfall (Extended Mix)') track.set_length(424) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title(u'Love Theory (Extended Mix)') track.set_length(299) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title(u'Canc\xfan (Extended Mix)') track.set_length(374) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title(u'South Side (Extended Mix)') track.set_length(374) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title(u'Illusions Continuous Mix') track.set_length(2018) disc.append_track(track) track = disc.create_track() track.set_number('17') track.set_title(u'Illusions Continuous Instrumental Mix') track.set_length(2018) disc.append_track(track) expected.append_disc(disc) s = bandcamp.ReleaseScraper.from_string('http://music.approachingnirvana.com/album/illusions') r = s.get_result() self.assertEqual(expected, r) def test_band_discography(self): expected = ListResult() expected.set_scraper_name(None) item = expected.create_item() item.set_name(u'Amanda Palmer \u2013 Who Killed Amanda Palmer [Alternate Tracks]') item.set_info('Release date: 2008-12-24') item.set_query(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer-alternate-tracks?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer-alternate-tracks?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'8in8 \u2013 Nighty Night') item.set_info('Release date: 2011-04-26') item.set_query(u'http://amandapalmer.bandcamp.com/album/nighty-night?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/nighty-night?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Amanda Palmer & The Grand Theft Orchestra \u2013 Theatre Is Evil') item.set_info('Release date: 2012-09-11') item.set_query(u'http://amandapalmer.bandcamp.com/album/theatre-is-evil-2?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/theatre-is-evil-2?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Amanda Palmer \u2013 The 
Art of Asking Playlist') item.set_info('Release date: 2014-11-11') item.set_query(u'http://amandapalmer.bandcamp.com/album/the-art-of-asking-playlist?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/the-art-of-asking-playlist?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Amanda Palmer \u2013 Amanda Palmer Performs The Popular Hits Of Radiohead On Her Magical Ukulele') item.set_info('Release date: 2010-07-20') item.set_query(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-performs-the-popular-hits-of-radiohead-on-her-magical-ukulele?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-performs-the-popular-hits-of-radiohead-on-her-magical-ukulele?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Tristan Allen \u2013 Tristan Allen') item.set_info('Release date: 2010-12-12') item.set_query(u'http://amandapalmer.bandcamp.com/album/tristan-allen?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/tristan-allen?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u"Amanda Palmer & Friends \u2013 AFP's Webcastacular NYC Extravaganzaca!") item.set_info('Release date: 2010-09-23') item.set_query(u'http://amandapalmer.bandcamp.com/album/afps-webcastacular-nyc-extravaganzaca?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/afps-webcastacular-nyc-extravaganzaca?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Amanda Palmer \u2013 Who Killed Amanda Palmer') item.set_info('Release date: 2008-09-16') item.set_query(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Amanda Palmer & Murder By Death \u2013 7 Series (Part 3)') item.set_info('Release date: 2009-05-15') item.set_query(u'http://amandapalmer.bandcamp.com/album/7-series-part-3?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/7-series-part-3?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Amanda Palmer \u2013 Amanda Palmer Goes Down Under') item.set_info('Release date: 2011-01-21') item.set_query(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-goes-down-under?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-goes-down-under?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Amanda Palmer, The Young Punx, and Peaches \u2013 Map of Tasmania: The Remix Project') item.set_info('Release date: 2011-04-10') item.set_query(u'http://amandapalmer.bandcamp.com/album/map-of-tasmania-the-remix-project?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/map-of-tasmania-the-remix-project?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Neil Gaiman and Amanda Palmer \u2013 An Evening With Neil Gaiman and Amanda Palmer') item.set_info('Release date: 2013-11-19') item.set_query(u'http://amandapalmer.bandcamp.com/album/an-evening-with-neil-gaiman-and-amanda-palmer?pk=459') item.set_url(u'http://amandapalmer.bandcamp.com/album/an-evening-with-neil-gaiman-and-amanda-palmer?pk=459') expected.append_item(item) s = bandcamp.DiscographyScraper.from_string('http://amandapalmer.bandcamp.com') r = s.get_result() self.assertEqual(expected, r) def test_band_discography_non_bandcamp_url(self): expected = ListResult() expected.set_scraper_name(None) 
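# Expected discography items for a Bandcamp site served from a custom domain (music.sufjan.com) rather than a *.bandcamp.com URL.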
item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 All Delighted People EP') item.set_info('Release date: 2010-08-20') item.set_query(u'http://music.sufjan.com/album/all-delighted-people-ep?pk=459') item.set_url(u'http://music.sufjan.com/album/all-delighted-people-ep?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 Illinois') item.set_info('Release date: 2005-07-05') item.set_query(u'http://music.sufjan.com/album/illinois?pk=459') item.set_url(u'http://music.sufjan.com/album/illinois?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 Songs for Christmas') item.set_info('Release date: 2006-11-21') item.set_query(u'http://music.sufjan.com/album/songs-for-christmas?pk=459') item.set_url(u'http://music.sufjan.com/album/songs-for-christmas?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 A Sun Came') item.set_info('Release date: 2004-07-20') item.set_query(u'http://music.sufjan.com/album/a-sun-came?pk=459') item.set_url(u'http://music.sufjan.com/album/a-sun-came?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 The Avalanche') item.set_info('Release date: 2006-07-11') item.set_query(u'http://music.sufjan.com/album/the-avalanche?pk=459') item.set_url(u'http://music.sufjan.com/album/the-avalanche?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 The BQE') item.set_info('Release date: 2009-10-20') item.set_query(u'http://music.sufjan.com/album/the-bqe?pk=459') item.set_url(u'http://music.sufjan.com/album/the-bqe?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 Silver & Gold') item.set_info('Release date: 2012-11-13') item.set_query(u'http://music.sufjan.com/album/silver-gold?pk=459') item.set_url(u'http://music.sufjan.com/album/silver-gold?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 Michigan') item.set_info('Release date: 2003-07-01') item.set_query(u'http://music.sufjan.com/album/michigan?pk=459') item.set_url(u'http://music.sufjan.com/album/michigan?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 Carrie & Lowell') item.set_info('Release date: 2015-03-31') item.set_query(u'http://music.sufjan.com/album/carrie-lowell?pk=459') item.set_url(u'http://music.sufjan.com/album/carrie-lowell?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 Enjoy Your Rabbit') item.set_info('Release date: 2002-04-16') item.set_query(u'http://music.sufjan.com/album/enjoy-your-rabbit?pk=459') item.set_url(u'http://music.sufjan.com/album/enjoy-your-rabbit?pk=459') expected.append_item(item) item = expected.create_item() item.set_name(u'Sufjan Stevens \u2013 The Age of Adz') item.set_info('Release date: 2010-10-12') item.set_query(u'http://music.sufjan.com/album/the-age-of-adz?pk=459') item.set_url(u'http://music.sufjan.com/album/the-age-of-adz?pk=459') expected.append_item(item) s = bandcamp.DiscographyScraper('http://music.sufjan.com') r = s.get_result() self.assertEqual(expected, r) def test_404(self): expected = NotFoundResult() expected.set_scraper_name(None) s = bandcamp.ReleaseScraper.from_string('http://blubb.bla.com/album/blubb') r = s.get_result() self.assertEqual(expected, r) @unittest.skip("skipping 
Musik-Sammler tests until scraper is fixed") class MusikSammlerTest(TestCase): def test_simple_album(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('1994') release_event.set_country('Niederlande') expected.append_release_event(release_event) expected.set_format(u'CD, Re-Release, Remastered') label_id = expected.create_label_id() label_id.set_label('EMI Records Ltd.') label_id.append_catalogue_nr('7243 8 29752 2 9') expected.append_label_id(label_id) expected.set_title('Dark Side Of The Moon') artist = expected.create_artist() artist.set_name('Pink Floyd') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Rock') expected.append_genre('Progressive Rock') expected.append_genre('Psychedelic Rock') expected.set_url('http://www.musik-sammler.de/media/830798/') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('(a) Speak To Me (b) Breathe') track.set_length(237) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('On The Run') track.set_length(215) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Time') track.set_length(424) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('The Great Gig In The Sky') track.set_length(287) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Money') track.set_length(382) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Us And Them') track.set_length(470) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Any Colour You Like') track.set_length(205) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Brain Damage') track.set_length(230) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Eclipse') track.set_length(121) disc.append_track(track) expected.append_disc(disc) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/830798/') r = s.get_result() self.assertEqual(expected, r) def test_multiple_discs(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2011') release_event.set_country('Japan') expected.append_release_event(release_event) expected.set_format(u'2-CD, Pappschuber, Re-Release, Remastered, Digisleeve') label_id = expected.create_label_id() label_id.set_label('EMI Japan') label_id.append_catalogue_nr('TOCP 71163 64') expected.append_label_id(label_id) expected.set_title('The Dark Side Of The Moon') artist = expected.create_artist() artist.set_name('Pink Floyd') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Rock') expected.append_genre('Psychedelic Rock') expected.set_url('http://www.musik-sammler.de/media/883773') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Speak To Me') track.set_length(67) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Breathe (In The Air)') track.set_length(169) disc.append_track(track) track = disc.create_track() 
track.set_number('3') track.set_title('On The Run') track.set_length(225) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Time') track.set_length(413) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('The Great Gig In The Sky') track.set_length(284) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Money') track.set_length(383) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Us And Them') track.set_length(469) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Any Colour You Like') track.set_length(206) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Brain Damage') track.set_length(226) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Eclipse') track.set_length(132) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Speak To Me') track.set_length(165) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Breathe (In The Air)') track.set_length(170) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('On The Run') track.set_length(308) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Time') track.set_length(391) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('The Great Gig In The Sky') track.set_length(410) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Money') track.set_length(521) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Us And Them') track.set_length(489) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Any Colour You Like') track.set_length(490) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Brain Damage') track.set_length(223) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Eclipse') track.set_length(139) disc.append_track(track) expected.append_disc(disc) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/883773') r = s.get_result() self.assertEqual(expected, r) def test_track_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2002') release_event.set_country(u'\xd6sterreich') expected.append_release_event(release_event) expected.set_format(u'Split-CD, Cardsleeve') label_id = expected.create_label_id() label_id.set_label('Din Records') label_id.append_catalogue_nr('din cds 2 / EFA 51665-2') expected.append_label_id(label_id) expected.set_title('Icol Diston') artist = expected.create_artist() artist.set_name('Arovane') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) artist = expected.create_artist() artist.set_name('Dynamo') artist.set_various(False) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Techno') expected.append_genre('Electronic') expected.append_genre('Ambient') expected.append_genre('Electro') 
expected.append_genre('Freeform') expected.set_url('http://www.musik-sammler.de/media/512755') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('I.O.') track.set_length(374) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Parf') track.set_length(374) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Torn') track.set_length(417) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Andar') track.set_length(464) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Icol Diston') track.set_length(19) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Yua:E') track.set_length(491) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Icol Vern') track.set_length(303) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Nacrath') track.set_length(298) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Acval') track.set_length(306) track_artist = expected.create_artist() track_artist.set_name('Arovane') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title(u'Au\xdfen Vor Amx') track.set_length(560) track_artist = expected.create_artist() track_artist.set_name('Dynamo') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('No. 
8 Amx') track.set_length(825) track_artist = expected.create_artist() track_artist.set_name(None) track_artist.set_various(True) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/512755') r = s.get_result() self.assertEqual(expected, r) def test_various_artists(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date(None) release_event.set_country('Deutschland') expected.append_release_event(release_event) expected.set_format(u'2-CD, Erstauflage') label_id = expected.create_label_id() label_id.set_label('BCM Records GmbH') label_id.append_catalogue_nr('55359') expected.append_label_id(label_id) expected.set_title('Grooves Loops & Patterns Vol.1 + Vol.2') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Techno') expected.append_genre('Electronic') expected.append_genre('Breakbeat') expected.append_genre('Electro') expected.set_url('http://www.musik-sammler.de/media/313881') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Latin Disco [130 Bpm]') track.set_length(174) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Straight Disco [131 Bpm]') track.set_length(187) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Medium Disco [116 Bpm]') track.set_length(195) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Slow Disco [87 Bpm]') track.set_length(215) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('UK Happy Disco I [118 Bpm]') track.set_length(238) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('UK Happy Disco II [116 Bpm]') track.set_length(242) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('UK Happy Disco III [121 Bpm]') track.set_length(250) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Sexy Disco [107 Bpm]') track.set_length(288) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Ethno Disco [98 Bpm]') track.set_length(275) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Us Disco [120 Bpm]') track.set_length(160) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Cuba Disco [122 Bpm]') track.set_length(169) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Dance Floor Disco I [125 Bpm]') track.set_length(242) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Dance Floor Disco II [122,5 Bpm]') track.set_length(240) disc.append_track(track) expected.append_disc(disc) disc = expected.create_disc() disc.set_number(2) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('Straight Rock [120 Bpm]') track.set_length(175) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Medium Rock [132 Bpm]') track.set_length(158) disc.append_track(track) 
track = disc.create_track() track.set_number('3') track.set_title('Fast Rock [160 Bpm]') track.set_length(162) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('Rock Ballad [71 Bpm]') track.set_length(238) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Medium Rock Balad [106 Bpm]') track.set_length(195) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Funk Rock [108 Bpm]') track.set_length(191) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Latin Rock [122 Bpm]') track.set_length(175) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Hard Rock Shuffle [132 Bpm]') track.set_length(158) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Medium Rock Shuffle [99 Bpm]') track.set_length(170) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Rhythm & Blues [118 Bpm]') track.set_length(159) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('5/4 Freak Rock [165 Bpm]') track.set_length(140) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Rockabilly [123 Bpm]') track.set_length(154) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('Country Rock [92 Bpm]') track.set_length(204) disc.append_track(track) expected.append_disc(disc) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/313881') r = s.get_result() self.assertEqual(expected, r) def test_va_album(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('1986') release_event.set_country('USA') expected.append_release_event(release_event) expected.set_format(u'LP') label_id = expected.create_label_id() label_id.set_label('Capitol Records, Inc.') label_id.append_catalogue_nr('SV-12499') expected.append_label_id(label_id) expected.set_title('Iron Eagle - Original Motion Picture Soundtrack') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Soundtrack') expected.set_url('http://www.musik-sammler.de/media/43567') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('One Vision') track.set_length(240) track_artist = expected.create_artist() track_artist.set_name('Queen') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Iron Eagle (Never Say Die)') track.set_length(208) track_artist = expected.create_artist() track_artist.set_name('King Kobra') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('These Are The Good Times') track.set_length(225) track_artist = expected.create_artist() track_artist.set_name('Eric Martin') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() 
track.set_number('4') track.set_title('Maniac House') track.set_length(294) track_artist = expected.create_artist() track_artist.set_name('Katrina & The Waves') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Intense') track.set_length(270) track_artist = expected.create_artist() track_artist.set_name('George Clinton') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Hide The Rainbow') track.set_length(235) track_artist = expected.create_artist() track_artist.set_name('Dio') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title("It's Too Late") track.set_length(186) track_artist = expected.create_artist() track_artist.set_name('Helix') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title('Road Of The Gypsy') track.set_length(268) track_artist = expected.create_artist() track_artist.set_name('Adrenalin') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Love Can Make You Cry') track.set_length(258) track_artist = expected.create_artist() track_artist.set_name('Urgent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('This Raging Fire') track.set_length(246) track_artist = expected.create_artist() track_artist.set_name('Jon Butcher Axis') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/43567') r = s.get_result() self.assertEqual(expected, r) def test_featuring_track_artist(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2008') release_event.set_country('Deutschland') expected.append_release_event(release_event) expected.set_format(u'CD, Heftbeilage, Digipak') label_id = expected.create_label_id() label_id.set_label('Batbeliever Releases') label_id.append_catalogue_nr('BAT 048') expected.append_label_id(label_id) expected.set_title('Gothic File 05') artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Dark Wave') expected.append_genre('Gothic') expected.set_url('http://www.musik-sammler.de/media/257802') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title('O Varium Fortune') track.set_length(352) track_artist = expected.create_artist() track_artist.set_name('Corvus Corax') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('Zaubererbruder [EP-Version]') track.set_length(285) track_artist = expected.create_artist() track_artist.set_name('ASP') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Eric Fish') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Mein Gral') track.set_length(236) track_artist = expected.create_artist() track_artist.set_name('Megaherz') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title(u'Komm S\xfc\xdfer Tod') track.set_length(275) track_artist = expected.create_artist() track_artist.set_name('Eisbrecher') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('Get Some Sleep [Exclusive Version]') track.set_length(252) track_artist = expected.create_artist() track_artist.set_name('Mono Inc.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Prayer Before Birth [Underwaterpilots Remix]') track.set_length(263) track_artist = expected.create_artist() track_artist.set_name('Anne Clark') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('Haufenweise Scheisse (XL)') track.set_length(320) track_artist = expected.create_artist() track_artist.set_name(u'Grossstadtgefl\xfcster') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title(u'D\xe9cadence') track.set_length(191) track_artist = expected.create_artist() track_artist.set_name('Charles De Goal') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Burning Up') track.set_length(248) track_artist = expected.create_artist() track_artist.set_name('Ladytron') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') track.set_title('Horizon [Remastered]') track.set_length(326) track_artist = expected.create_artist() track_artist.set_name('Black Orchid') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Play Games') track.set_length(212) track_artist = expected.create_artist() track_artist.set_name('The Rorschach Garden') track_artist.set_various(False) 
track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('All Ends') track.set_length(256) track_artist = expected.create_artist() track_artist.set_name('Imatem') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('All About The Now') track.set_length(289) track_artist = expected.create_artist() track_artist.set_name('Miserylab') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title('Hymn Of The Shades') track.set_length(202) track_artist = expected.create_artist() track_artist.set_name('Descendants Of Cain') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Bleed') track.set_length(244) track_artist = expected.create_artist() track_artist.set_name('ELA') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('Never Stop Crying') track.set_length(254) track_artist = expected.create_artist() track_artist.set_name("Jennie Tebler's Out Of Oblivion") track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('17') track.set_title('Killhoney') track.set_length(299) track_artist = expected.create_artist() track_artist.set_name('End Of Green') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/257802') r = s.get_result() self.assertEqual(expected, r) def test_album_with_multiple_unsplit_artist_names(self): expected = ReleaseResult() expected.set_scraper_name(None) release_event = expected.create_release_event() release_event.set_date('2005') release_event.set_country('USA') expected.append_release_event(release_event) expected.set_format(u'CD') label_id = expected.create_label_id() label_id.set_label('G Unit / Interscope Records') expected.append_label_id(label_id) expected.set_title("Get Rich Or Die Tryin' (Music From And Inspired By The Motion Picture)") artist = expected.create_artist() artist.set_name(None) artist.set_various(True) artist.append_type(expected.ArtistTypes.MAIN) expected.append_release_artist(artist) expected.append_genre('Soundtrack') expected.set_url('http://www.musik-sammler.de/media/154887') disc = expected.create_disc() disc.set_number(1) disc.set_title(None) track = disc.create_track() track.set_number('1') track.set_title("Hustler's Ambition") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('2') track.set_title('What If') track.set_length(None) track_artist = 
expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('3') track.set_title('Things Change') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Spider Loc') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('50 Cent & Lloyd Banks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('4') track.set_title('You Already Know') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Lloyd Banks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('50 Cent & Young Buck') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('5') track.set_title('When Death Becomes You') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('M.O.P.') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('6') track.set_title('Have A Party') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Mobb Deep') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('50 Cent & Nate Dogg') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('7') track.set_title('We Both Think Alike') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Olivia') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('8') track.set_title("Don't Need No Help") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Young Buck') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('9') track.set_title('Get Low') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Lloyd Banks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('10') 
track.set_title('Fake Love') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Tony Yayo') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('11') track.set_title('Window Shopper') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('12') track.set_title('Born Alone, Die Alone') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Lloyd Banks') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('13') track.set_title('You A Shooter') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('Mobb Deep') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('14') track.set_title("I Don't Know Officer") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) track_artist = expected.create_artist() track_artist.set_name('Lloyd Banks, Prodigy, Spider Loc & Mase') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.FEATURING) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('15') track.set_title('Talk About Me') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('16') track.set_title('When It Rains It Pours') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('17') track.set_title('Best Friend') track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) track = disc.create_track() track.set_number('18') track.set_title("I'll Whip Ya Head Boy") track.set_length(None) track_artist = expected.create_artist() track_artist.set_name('50 Cent & Young Buck') track_artist.set_various(False) track_artist.append_type(expected.ArtistTypes.MAIN) track.append_artist(track_artist) disc.append_track(track) expected.append_disc(disc) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/154887') r = s.get_result() self.assertEqual(expected, r) def test_404(self): expected = NotFoundResult() 
expected.set_scraper_name(None) s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/99999999999999') r = s.get_result() self.assertEqual(expected, r) def test_search_scraper(self): s = musiksammler.SearchScraper('love') r = s.get_result() self.assertTrue(len(r.get_items()) > 0)<|fim▁end|>
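The expected-result fixtures above repeat the same create-track / create-artist sequence for every row. A condensed helper in the same style, sketched below, keeps such fixtures readable; the helper name and signature are illustrative only and are not part of the scraper test suite.

# Illustrative only: a condensed way to build the same expected fixtures.
# The helper name and signature are hypothetical.
def add_track(expected, disc, number, title, length, main_artists, featuring=()):
    track = disc.create_track()
    track.set_number(number)
    track.set_title(title)
    track.set_length(length)
    for name in main_artists:
        artist = expected.create_artist()
        artist.set_name(name)
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.MAIN)
        track.append_artist(artist)
    for name in featuring:
        artist = expected.create_artist()
        artist.set_name(name)
        artist.set_various(False)
        artist.append_type(expected.ArtistTypes.FEATURING)
        track.append_artist(artist)
    disc.append_track(track)

# add_track(expected, disc, '2', 'Zaubererbruder [EP-Version]', 285,
#           ['ASP'], featuring=['Eric Fish'])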
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # This file is part of Radicale Server - Calendar Server # Copyright © 2008 Nicolas Kandel # Copyright © 2008 Pascal Halter # Copyright © 2008-2013 Guillaume Ayoub # # This library is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Radicale. If not, see <http://www.gnu.org/licenses/>. """ Authentication management. """ import sys from .. import config, log def load(): """Load list of available authentication managers.""" auth_type = config.get("auth", "type") log.LOGGER.debug("Authentication type is %s" % auth_type) if auth_type == "None": return None elif auth_type == 'custom': auth_module = config.get("auth", "custom_handler") __import__(auth_module) module = sys.modules[auth_module] else: root_module = __import__( "auth.%s" % auth_type, globals=globals(), level=2) module = getattr(root_module, auth_type)<|fim▁hole|> sys.modules[__name__].is_authenticated = module.is_authenticated return module def is_authenticated(user, password): """Check if the user is authenticated. This method is overriden if an auth module is loaded. """ return True # Default is always True: no authentication<|fim▁end|>
# Override auth.is_authenticated
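The load() function above picks an authentication backend by name and rebinds the module-level is_authenticated hook. A minimal standalone sketch of that dispatch pattern follows; it simplifies the package-relative import the original performs, and the module names are hypothetical.

# Minimal sketch of the dynamic-dispatch idea used above, independent of Radicale.
import importlib

def load_auth_backend(auth_type, custom_handler=None):
    if auth_type == "None":
        return None                      # keep the permissive default
    if auth_type == "custom":
        module = importlib.import_module(custom_handler)
    else:
        module = importlib.import_module("auth.%s" % auth_type)
    # Re-bind the hook so callers keep using a single entry point.
    globals()["is_authenticated"] = module.is_authenticated
    return module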
<|file_name|>statlog-rollup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2008 - 2012 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # usage: %prog file [ file [ file [...]]] # This script merges the timing data from several files into a single # aggregate which is sent to stdout. class stamp: def __init__(this, time, weight): this.time = long(time) this.weight = long(weight) def weighted_time(this): return this.time * this.weight def minimum(x, y): if x < y: return x else: return y def maximum(x, y): if x > y: return x else: return y class timing_file: def __init__(this, filename = ''): this.stamps = {} this.filename = '' this.filename = filename if this.filename: f = open(filename, 'r') this.lines = f.readlines() f.close() this.lines = [ line.strip() for line in this.lines ] for line in this.lines: space_sep = line.split() if len(space_sep) != 2: raise Exception('bad timing line in %s: %s' % (this.filename, line)) star_sep = space_sep[0].split('*') if len(star_sep) == 1: weight = 1 else: weight = star_sep[1] this.stamps[space_sep[1]] = stamp(star_sep[0], weight) def write(this): for stamp in this.stamps: print '%d*%d %s' % (this.stamps[stamp].time, this.stamps[stamp].weight, stamp) def merge(this, old): new = timing_file() minmax = ['maximum', 'minimum'] for s in this.stamps: if s in minmax: continue if s in old.stamps: total_weight = this.stamps[s].weight + old.stamps[s].weight weighted_average_time = (this.stamps[s].weighted_time() + old.stamps[s].weighted_time()) / total_weight new.stamps[s] = stamp(weighted_average_time, total_weight)<|fim▁hole|> for s in old.stamps: if s in minmax: continue if s not in this.stamps: new.stamps[s] = old.stamps[s] stamps = [this.stamps[s].time for s in this.stamps] + [old.stamps[s].time for s in old.stamps] new.stamps['maximum'] = stamp(reduce(maximum, stamps, 0), 0) if new.stamps['maximum'] > 0: new.stamps['minimum'] = stamp(reduce(minimum, stamps, new.stamps['maximum'].time), 0) return new def option_parser(): import optparse usage = "Usage: %prog file [ file [ file [...]]]" parser = optparse.OptionParser(usage = usage) general = optparse.OptionGroup(parser, 'General Options', '') # general.add_option('-i', '--input', # type = 'string', # dest = 'infile', # default = '', # help = 'use this as the input file [default: stdin]') parser.add_option_group(general) return parser if __name__ == '__main__': import optparse options, args = option_parser().parse_args() sum = timing_file() for a in args: sum = sum.merge(timing_file(a)) sum.write()<|fim▁end|>
            else:
                new.stamps[s] = this.stamps[s]
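A worked example of the weighted-average merge performed in merge() above: a stamp recorded as "10*3" (time 10, weight 3) merged with "20*1" yields total weight 4 and an integer-averaged time of 12.

# Worked example of the weighted merge in merge() above.
time_a, weight_a = 10, 3   # parsed from a line like "10*3 label"
time_b, weight_b = 20, 1   # parsed from "20*1 label"

total_weight = weight_a + weight_b
weighted_average = (time_a * weight_a + time_b * weight_b) // total_weight
# weighted_average == 12, total_weight == 4, i.e. the merged entry "12*4 label"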
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from sqlobject.dbconnection import registerConnection

def builder():
    import mysqlconnection
    return mysqlconnection.MySQLConnection

def isSupported():
    try:
        import MySQLdb
    except ImportError:
        return False<|fim▁hole|><|fim▁end|>
    return True

registerConnection(['mysql'], builder, isSupported)
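The registration call above pairs a lazy builder with a capability probe keyed by URI scheme. A generic sketch of that registry pattern follows; the names are hypothetical, and SQLObject's real registry lives in dbconnection.

# Generic sketch of the scheme -> (builder, isSupported) registry used above.
_REGISTRY = {}

def register_connection(schemes, builder, is_supported):
    for scheme in schemes:
        _REGISTRY[scheme] = (builder, is_supported)

def connection_class_for(scheme):
    builder, is_supported = _REGISTRY[scheme]
    if not is_supported():
        raise RuntimeError("driver for %r is not installed" % scheme)
    return builder()   # the driver import happens lazily, only when first requested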
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from mkt.site.helpers import page_title


@register.function
@jinja2.contextfunction
def operators_page_title(context, title=None):
    section = _lazy('Operator Dashboard')
    title = u'%s | %s' % (title, section) if title else section
    return page_title(context, title)<|fim▁end|>
import jinja2
from jingo import register
from tower import ugettext_lazy as _lazy
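The helper above only prepends a page-specific title when one is given; the small snippet below shows the two possible renderings, using the section text as a plain string for illustration.

# The title logic from operators_page_title, shown on its own.
section = u'Operator Dashboard'
for title in (None, u'Reports'):
    rendered = u'%s | %s' % (title, section) if title else section
    print(rendered)   # -> "Operator Dashboard", then "Reports | Operator Dashboard"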
<|file_name|>client.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! Hash-addressed content resolver & fetcher. use std::{io, fs}; use std::sync::Arc; use std::path::PathBuf; use util::{Mutex, H256, sha3}; use fetch::{Fetch, FetchError, Client as FetchClient}; use urlhint::{ContractClient, URLHintContract, URLHint, URLHintResult}; /// API for fetching by hash. pub trait HashFetch { /// Fetch hash-addressed content. /// Parameters: /// 1. `hash` - content hash /// 2. `on_done` - callback function invoked when the content is ready (or there was error during fetch) /// /// This function may fail immediately when fetch cannot be initialized or content cannot be resolved. fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error>; } /// Hash-fetching error. #[derive(Debug)] pub enum Error { /// Hash could not be resolved to a valid content address. NoResolution, /// Downloaded content hash does not match. HashMismatch { expected: H256, got: H256 }, /// IO Error while validating hash. IO(io::Error), /// Error during fetch. Fetch(FetchError), } impl From<FetchError> for Error { fn from(error: FetchError) -> Self { Error::Fetch(error) } } impl From<io::Error> for Error { fn from(error: io::Error) -> Self { Error::IO(error) } } /// Default Hash-fetching client using on-chain contract to resolve hashes to URLs. pub struct Client { contract: URLHintContract, fetch: Mutex<FetchClient>, } impl Client { /// Creates new instance of the `Client` given on-chain contract client. pub fn new(contract: Arc<ContractClient>) -> Self { Client { contract: URLHintContract::new(contract), fetch: Mutex::new(FetchClient::default()), } } } impl HashFetch for Client { fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error> { debug!(target: "dapps", "Fetching: {:?}", hash); let url = try!( self.contract.resolve(hash.to_vec()).map(|content| match content { URLHintResult::Dapp(dapp) => { dapp.url() }, URLHintResult::Content(content) => { content.url }, }).ok_or_else(|| Error::NoResolution) ); debug!(target: "dapps", "Resolved {:?} to {:?}. Fetching...", hash, url); self.fetch.lock().request_async(&url, Default::default(), Box::new(move |result| { fn validate_hash(hash: H256, result: Result<PathBuf, FetchError>) -> Result<PathBuf, Error> { let path = try!(result); let mut file_reader = io::BufReader::new(try!(fs::File::open(&path))); let content_hash = try!(sha3(&mut file_reader)); if content_hash != hash { Err(Error::HashMismatch{ got: content_hash, expected: hash }) } else { Ok(path) } } debug!(target: "dapps", "Content fetched, validating hash ({:?})", hash); on_done(validate_hash(hash, result)) })).map_err(Into::into) }<|fim▁hole|><|fim▁end|>
}
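The Rust fetcher above downloads to a temporary path and rejects the file when its content hash differs from the requested one. The same validate-after-download check is sketched below in Python, with SHA-256 standing in for the Keccak-256 hash the original uses.

# Sketch of the hash-validation step, not Parity's actual implementation.
import hashlib

class HashMismatch(Exception):
    pass

def validate_download(path, expected_hex):
    digest = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(8192), b""):
            digest.update(chunk)
    if digest.hexdigest() != expected_hex:
        raise HashMismatch("got %s, expected %s" % (digest.hexdigest(), expected_hex))
    return path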
<|file_name|>bfe_cited_by.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """BibFormat element - Prints reference to documents citing this one """ __revision__ = "$Id$" import cgi def format_element(bfo, separator='; '): """ Prints a list of records citing this record @param separator: a separator between citations """ from urllib import quote from invenio.config import CFG_SITE_URL primary_report_numbers = bfo.fields('037__a') additional_report_numbers = bfo.fields('088__a') primary_citations = ['<a href="' + CFG_SITE_URL + \ '/search?f=reference&amp;p=' + quote(report_number) + \ '&amp;ln='+ bfo.lang +'">' + \ cgi.escape(report_number) + '</a>' \ for report_number in primary_report_numbers] additional_citations = ['<a href="' + CFG_SITE_URL + \ '/search?f=reference&amp;p=' + quote(report_number)+ \ '&amp;ln='+ bfo.lang + '">' + \ cgi.escape(report_number) + '</a>' \ for report_number in additional_report_numbers] citations = primary_citations citations.extend(additional_citations) return separator.join(citations) def escape_values(bfo): """ Called by BibFormat in order to check if output of this element should be escaped.<|fim▁hole|> """ return 0<|fim▁end|>
<|file_name|>test.py<|end_file_name|><|fim▁begin|><|fim▁hole|> token = keyring.get_password('messagesReceiver', 'skypeToken') registrationToken = keyring.get_password('messagesReceiver', 'skypeRegistrationToken') username = keyring.get_password('messagesReceiver', 'skypeUsername') password = keyring.get_password('messagesReceiver', 'skypePassword') s = Skype(token, registrationToken) if s.token == None: s.login(username, password) print "logging in..." if s.registrationToken == None: print s.createRegistrationToken() print s.subcribe() print "creating endpoint and registrationToken..." while True: data = s.pull() if data == 404: print s.createRegistrationToken() print s.subcribe() data = s.pull() if data == 400: continue messages = utils.skypeParse(data) if not messages: continue for sender, receiver, message in messages: if receiver != None: print "%s to %s" % (sender, receiver) else: print "From %s" % sender print message<|fim▁end|>
from messenger import Skype import keyring import utils
<|file_name|>function.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= """Python front-end supports for functions. NOTE: functions are currently experimental and subject to change! """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import hashlib from tensorflow.core.framework import attr_value_pb2 from tensorflow.core.framework import function_pb2 from tensorflow.python import pywrap_tensorflow as c_api from tensorflow.python.eager import context from tensorflow.python.framework import c_api_util from tensorflow.python.framework import dtypes from tensorflow.python.framework import graph_to_function_def from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import variable_scope as vs from tensorflow.python.util import compat from tensorflow.python.util import function_utils from tensorflow.python.util import tf_contextlib from tensorflow.python.util import tf_inspect class Defun(object): """Decorator used to define TensorFlow functions. Use this decorator to make a Python function usable directly as a TensorFlow function. The decorated function must add ops to the default graph and return zero or more `Tensor` objects. Call the decorator with named arguments, one for each argument of the function to decorate, with the expected type of the argument as value. For example if the function to decorate accepts two `tf.float32` arguments named `x` and `y`, call the decorator with: @Defun(tf.float32, tf.float32) def foo(x, y): ... When you call the decorated function it will add `call` ops to the default graph and adds the definition of the function into the default graph. Because the addition of the function into the graph is deferred, the decorator can be used anywhere in the program. Any variables created inside of the function are hoisted into the outer graph. Note that the variables are created in the variable scope that was active during the first call to the function. Subsequent function calls will refer to the same set of variables. Definitions of functions in a graph are frozen as soon as the graph is used to create a session. However, new functions and new calls to existing functions may be added to the graph, with the new functions themselves becoming immediately frozen. Example, but also see the [How To on functions](link_needed). ```python # Defining the function. @tf.Defun(tf.float32, tf.float32) def MyFunc(x, y): return x + y, x - y # Building the graph. a = tf.constant([1.0]) b = tf.constant([2.0]) c, d = MyFunc(a, b, name='mycall') ``` """ def __init__(self, *input_types, **kwargs): """Create a `Defun` decorator. Args: *input_types: A list of `tf.DType` **kwargs: Optional keyword arguments, including func_name - (optional). 
A python string, the name to use to declare this `Function` in the graph. grad_func - (optional). A function implementing the gradient of the function-to-register. This is must be a `_DefinedFunction` object. The gradient function must satisfy the criterion defined in function.proto:GradientDef. python_grad_func - (optional). A function implementing the gradient of the function python-side. This function must take the current op and the gradients w.r.t. its outputs, and return the gradients w.r.t. the inputs. That is it must implement the interface expected by `tf.RegisterGradient`). This will be called by tf.gradients to add the gradient ops to the graph. At most one of grad_func and python_grad_func can be specified. out_names = (optional). A list of strings, one per output tensor. shape_func - (optional). A function taking the op and returning a list of static shapes to set for the function's outputs. """ self._input_types = input_types self._func_name = kwargs.pop("func_name", None) self._grad_func = kwargs.pop("grad_func", None) self._python_grad_func = kwargs.pop("python_grad_func", None) self._out_names = kwargs.pop("out_names", None) self._extra_kwargs = kwargs def __call__(self, func): # Various sanity checks on the callable func. if not callable(func): raise ValueError("func %s must be callable" % func) # Func should not use kwargs and defaults. argspec = tf_inspect.getargspec(func) if argspec.keywords or argspec.defaults: raise ValueError("Functions with argument defaults or keyword " "arguments are not supported.") # Computes how many arguments 'func' has. min_args = len(argspec.args) max_args = min_args if argspec.varargs: max_args = 1000000 argnames = argspec.args if tf_inspect.ismethod(func): # 1st argument is the "class" type. min_args -= 1 argnames = argnames[1:] if self._input_types: # If Defun is given a list of types for the inputs, the number # of input types should be compatible with 'func'. num = len(self._input_types) if num < min_args or num > max_args: raise ValueError( "The function has fewer arguments than the number of specified " "input types.") return _DefinedFunction( func, argnames, self._input_types, self._func_name, self._grad_func, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) # 'func' expects no arguments and input types is an empty list. if min_args == 0 and max_args == 0: return _DefinedFunction( func, [], [], self._func_name, self._grad_func, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) # Input types are unknown. It's an overloaded function and hence # its definition needs to be deferred until it's called. return _OverloadedFunction( func, argnames, self._func_name, self._grad_func, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) class _DefinedFunction(object): """_DefinedFunction encapsulates a function definition and its properties. Attributes: name: The function name. definition: The definition of this function. A FunctionDef proto. grad_func_name: If not None, the name of this function's gradient function. python_grad_func: A python callable implementing the gradient of the function python-side. """ def __init__(self, func, argnames, input_types, func_name=None, grad_func=None, python_grad_func=None, out_names=None, shape_func=None, capture_by_value=False, **kwargs): """Creates _DefinedFunction. Args: func: A python callable which constructs a tf function body. argnames: A list of strings for function argument names. input_types: The function's argument types. 
Can be a tuple, list of tf data types. func_name: The function name. Defaults to None, in which derives from 'func'. grad_func: This function's gradient function, if not None. Defaults to None. python_grad_func: A python callable implementing the gradient of the function python-side. out_names: An optional list of strings for the function return value names. shape_func: An optional function mapping an op to a list of static output shapes. capture_by_value: Boolean (defaults to False). If True, captured values will be copied into the function body. **kwargs: The keyword arguments. **kwargs is passed to every call site of this function. Raises: ValueError: The function definition is invalid. """ self._func = func self._input_types = input_types self._func_name = func_name self._grad_func = grad_func self._python_grad_func = python_grad_func self._out_names = out_names self._shape_func = shape_func self._capture_by_value = capture_by_value self._extra_kwargs = kwargs # Constructed only when C API is disabled, lazily self._definition = None # Constructed only when C API is enabled, lazily self._c_func = None self._sub_functions = dict() # Constructed with _definition or _c_func # pylint: disable=protected-access device_funcs = ops.get_default_graph()._device_functions_outer_to_inner # pylint: enable=protected-access # Get the innermost device if possbile. self._caller_device = device_funcs[-1] if device_funcs else None # Cached OpDef for this function. When C API is enabled, this is # the only part of FunctionDef that we cache in Python. When C API # is disabled the whole _definition is available and this is simply # another reference to _definition.signature self._op_def = None assert isinstance(input_types, (list, tuple)) self._arg_types = input_types self._arg_names = [argnames[i] if i < len(argnames) else ("arg%d" % i) for i in range(len(input_types))] @property def name(self): """Function name.""" self._create_definition_if_needed() return self._func_name @property def definition(self): """Function definition proto.""" self._create_definition_if_needed() if self._c_func: with c_api_util.tf_buffer() as buf: c_api.TF_FunctionToFunctionDef(self._c_func.func, buf) fdef = function_pb2.FunctionDef() proto_data = c_api.TF_GetBuffer(buf) fdef.ParseFromString(compat.as_bytes(proto_data)) return fdef return self._definition @property def _signature(self): self._create_definition_if_needed() return self._op_def def set_grad_func(self, grad_func): """Specifies the gradient function of this function.""" assert not self._grad_func assert isinstance(grad_func, _DefinedFunction) self._grad_func = grad_func @property def grad_func_name(self): """Its gradient function's name.""" return self._grad_func.name if self._grad_func else None @property def python_grad_func(self): """Python gradient function callable.""" return self._python_grad_func @property def declared_input_types(self): """Returns the list of data types of explicit declared inputs.""" return self._input_types @property def captured_inputs(self): """Returns the list of implicitly captured inputs.""" self._create_definition_if_needed() return self._extra_inputs @property def stateful_ops(self): """Returns the list of stateful ops in function definition. Returns: A list of (op.name, op.type) pairs. 
""" self._create_definition_if_needed() return self._stateful_ops def _create_definition_if_needed(self): """Creates the function definition if it's not created yet.""" with context.graph_mode(): self._create_definition_if_needed_impl() def _create_definition_if_needed_impl(self): """This is not what you want, see _create_definition_if_needed.""" if self._definition is not None or self._c_func is not None: return temp_graph = func_graph_from_py_func( self._func, self._arg_names, self._arg_types, self._func_name, self._capture_by_value, self._caller_device) self._extra_inputs = temp_graph.extra_inputs # pylint: disable=protected-access self._sub_functions = temp_graph._functions # pylint: enable=protected-access # Extra kwargs are treated as attrs on the function def. if self._func_name: base_func_name = self._func_name else: base_func_name = function_utils.get_func_name(self._func) if self._grad_func: base_func_name += ("_%s" % self._grad_func.name) kwargs_attr = _parse_kwargs_as_attrs(base_func_name, **self._extra_kwargs) if not temp_graph._c_graph: # pylint: disable=protected-access # Build the FunctionDef self._definition = graph_to_function_def.graph_to_function_def( temp_graph, temp_graph.get_operations(), temp_graph.inputs, temp_graph.outputs, out_names=self._out_names) for k in kwargs_attr: self._definition.attr[k].CopyFrom(kwargs_attr[k]) # Hash the definition and its dependencies. self._hash_str = self._create_hash_str( self._definition.signature.input_arg, self._definition.signature.output_arg, self._definition.node_def) # Finally, we decide the function name to use. If not specified, # make up something which is almost certainly unique (but deterministic). if not self._func_name: self._func_name = "_".join([base_func_name, self._hash_str]) self._definition.signature.name = self._func_name if self._func.__doc__: self._definition.signature.description = self._func.__doc__ self._op_def = self._definition.signature else: # C API is enabled output_names = ([compat.as_bytes(x) for x in self._out_names] if self._out_names else []) description = self._func.__doc__ or None # pylint: disable=protected-access c_func = c_api.TF_GraphToFunction_wrapper( temp_graph._c_graph, base_func_name, self._func_name is None, # append_hash_to_fn_name None, # opers [t._as_tf_output() for t in temp_graph.inputs], [t._as_tf_output() for t in temp_graph.outputs], output_names, None, # opts description) self._c_func = c_api_util.ScopedTFFunction(c_func) # pylint: enable=protected-access self._set_c_attrs(kwargs_attr) # Set cached fields: _op_def and _func_name (if not already set) self._op_def = self.definition.signature if self._func_name: assert self._func_name == self._op_def.name else: self._func_name = compat.as_str(self._op_def.name) self._stateful_ops = [(op.name, op.type) for op in temp_graph.get_operations() if op.op_def.is_stateful] def _set_c_attrs(self, attrs): """Sets `attrs` as attributes of self._c_func. Requires that self._c_func is not None. Args: attrs: a dictionary from attribute name to attribute proto value """ for name, attr_value in attrs.items(): serialized = attr_value.SerializeToString() # TODO(skyewm): this creates and deletes a new TF_Status for every attr. # It might be worth creating a convenient way to re-use the same status. c_api.TF_FunctionSetAttrValueProto(self._c_func.func, compat.as_str(name), serialized) def _create_hash_str(self, input_arg, output_arg, node_def): """Creates an 8-character string unique to this input. 
Args: input_arg: the input_arg field of an OpDef (e.g. self._definition.signature.input_arg) output_arg: the output_arg field of an OpDef (e.g. self._definition.signature.output_arg) node_def: the node_def field of a FunctionDef (e.g. self._definition.node_def) Returns: The unique string for this input """ hasher = hashlib.sha1() def update_num(n): hasher.update(compat.as_bytes("%x" % n)) def update_str(s): update_num(len(s)) hasher.update(compat.as_bytes(s)) def update_strs(slist): update_num(len(slist)) for s in slist: update_str(s) for adef in input_arg: update_str(adef.SerializeToString()) for adef in output_arg: update_str(adef.SerializeToString()) for n in sorted(node_def, key=lambda n: n.name): update_str(n.name) update_str(n.op) update_strs(n.input) update_num(len(n.attr)) # NOTE: protobuf map serialization does not guarantee ordering. for k in sorted(n.attr): update_str(k) update_str(n.attr[k].SerializeToString()) return hasher.hexdigest()[:8] def add_to_graph(self, g): """Adds this function into the graph g.""" self._create_definition_if_needed() # Adds this function into 'g'. # pylint: disable=protected-access if context.executing_eagerly(): context.context().add_function_def(self.definition) else: g._add_function(self) # pylint: enable=protected-access # Ensures related sub-routines are defined in 'g', too. for f in self._sub_functions.values(): f.add_to_graph(g) # Adds its gradient function, too. if self._grad_func: self._grad_func.add_to_graph(g) def __call__(self, *args, **kwargs): self.add_to_graph(ops.get_default_graph()) args = [ops.convert_to_tensor(_) for _ in args] + self._extra_inputs ret, op = _call(self._signature, *args, **kwargs) # Set a hidden attr in 'op' so that gradients_impl can refer back # to this _DefinedFunction instance to access python_grad_func. assert isinstance(op, ops.Operation) setattr(op, "__defun", self) if self._shape_func is not None: shapes = self._shape_func(op) if len(shapes) != len(op.outputs): raise ValueError("shape_func produced %d shapes for %d outputs" % (len(shapes), len(op.outputs))) for (t, shape) in zip(op.outputs, shapes): t.set_shape(shape) return ret class _OverloadedFunction(object): """_OverloadedFunction encapsulates an overloaded function. _OverloadedFunction maintains a mapping from input types to instantiated _DefinedFunction in self._overload. """ def __init__(self, func, argnames, func_name=None, grad_func=None, python_grad_func=None, out_names=None, **kwargs): """Creates _DefinedFunction. Args: func: A python callable which constructs a tf function body. argnames: A list of strings for function argument names. func_name: The function name. Defaults to None, in which derives from 'func'. grad_func: This function's gradient function, if not None. Defaults to None. python_grad_func: A python callable implementing the gradient of the function python-side. out_names: A list of strings for the function return value names. **kwargs: The keyword arguments. **kwargs is passed to every call site of this function. <|fim▁hole|> Raises: ValueError: The function definition is invalid. """ self._func = func self._argnames = argnames self._func_name = func_name assert grad_func is None or isinstance(grad_func, _OverloadedFunction) self._grad_func = grad_func self._python_grad_func = python_grad_func self._out_names = out_names self._extra_kwargs = kwargs self._overload = {} def instantiate(self, input_types): """Instantiate this function given input argument types. Args: input_types: A list of data types for the inputs. 
Returns: _DefinedFunction for the given input types. """ # Stringify the type list. key = _type_list_to_str(input_types) defined = self._overload.get(key) if not defined: # If not defined yet, define the function given the input types. name = self._func_name if name is not None: name = "_".join([name, key]) defined = _DefinedFunction( self._func, self._argnames, input_types, name, None, self._python_grad_func, out_names=self._out_names, **self._extra_kwargs) _ = defined.name # Fully instantiate the function definition. if self._grad_func: # If _grad_func is given, it is another # _OverloadedFunction. We need to instantiate it with the # right input types. output_types = [ dtypes.DType(_.type) for _ in defined._signature.output_arg # pylint: disable=protected-access ] # pylint: disable=protected-access defined._grad_func = self._grad_func.instantiate(input_types + output_types) # pylint: enable=protected-access self._overload[key] = defined return defined def __call__(self, *args, **kwargs): input_types = [] args = list(args) for (i, x) in enumerate(args): x = ops.convert_to_tensor(x) if not isinstance(x, ops.Tensor): raise ValueError("Expect a Tensor but get ", x) input_types.append(x.dtype) args[i] = x return self.instantiate(input_types)(*args, **kwargs) class _FuncGraph(ops.Graph): """A helper for constructing a function. _FuncGraph overrides ops.Graph's create_op() so that we can keep track of all inputs into every op created inside the function. If any input is from other graphs, we keep track of it in self.capture and substitute the input with a place holder. Each captured input's corresponding place holder is converted into a function argument and the caller passes in the captured tensor. """ def __init__(self, name, capture_by_value, *args, **kwargs): super(_FuncGraph, self).__init__(*args, **kwargs) self._capture_by_value = capture_by_value self._building_function = True self._outer_graph = ops.get_default_graph() self._vscope = vs.get_variable_scope() self._old_custom_getter = self._vscope.custom_getter # The name of the function. self.name = name # Placeholder tensors representing the inputs to this function. The tensors # are in this _FuncGraph. self.inputs = [] # Tensors that will be returned this function. The tensors are in this # _FuncGraph. self.outputs = [] # Maps external tensor -> internal tensor (e.g. input placeholder). self._captured = {} # The external tensors that have been captured as inputs and must be passed # to this function (empty if capturing by value, otherwise these are the # keys of _captured). self.extra_inputs = [] # Input placeholders that been added for captured values (empty if capturing # by value). self.extra_args = [] # Captured variables. # TODO(skyewm): is this needed? self.extra_vars = [] # pylint: disable=g-doc-return-or-yield @tf_contextlib.contextmanager def container(self, container_name): """Returns a context manager that specifies the resource container to use. Overridden from `tf.Graph` to update both the init_scope container and the present inner container. This is necessary to make sure setting containers applies correctly both to created variables and to stateful ops. Args: container_name: container name string. Returns: A context manager for defining resource containers for stateful ops, yields the container name. 
""" original_container = self._container # pylint: disable=protected-access with ops.init_scope(): original_init_container = ops.get_default_graph()._container try: self._container = container_name with ops.init_scope(): ops.get_default_graph()._container = container_name yield self._container finally: self._container = original_container with ops.init_scope(): ops.get_default_graph()._container = original_init_container # pylint: enable=protected-access # pylint: enable=g-doc-return-or-yield def getvar( self, getter, name, shape=None, dtype=None, initializer=None, reuse=None, trainable=True, collections=None, # pylint: disable=redefined-outer-name use_resource=None, **kwargs): """A custom variable getter.""" # Here, we switch the default graph to the outer graph and ask the # variable scope in which the function is defined to give us the # variable. The variable is stashed in extra_vars and returned to # the caller. # # We capture these variables so that the variable definition is # hoisted upward to the outer most graph. with self._outer_graph.as_default(): # pylint: disable=protected-access var = self._vscope.get_variable( vs._get_default_variable_store(), name, shape=shape, dtype=dtype, initializer=initializer, reuse=reuse, trainable=trainable, collections=collections, use_resource=use_resource) self.extra_vars.append(var) if isinstance(var, resource_variable_ops.ResourceVariable): # For resource-based variables read the variable outside the function # and pass in the value. This ensures that the function is pure and # differentiable. TODO(apassos) this may have performance problems if # the function will only do embedding lookups on the variable. return var.value() return var def create_op(self, op_type, inputs, data_types, **kwargs): for i, x in enumerate(inputs): if isinstance(x, ops.EagerTensor) or x.graph is not self: inputs[i] = self.capture(x) return super(_FuncGraph, self).create_op(op_type, inputs, data_types, **kwargs) def capture(self, tensor, name=None): """Adds the given tensor to this graph and returns the captured tensor.""" if tensor in self._captured: # Captured already. return self._captured[tensor] elif self._capture_by_value: return self._add_tensor_and_parents(tensor) else: return self._capture_tensor_as_extra_input(tensor, name) def _capture_tensor_as_extra_input(self, tensor, name=None): # Substitute with a placeholder. self.extra_inputs.append(tensor) # Hoist the new input placeholder out of any control flow context # we're currently in. 
with ops.control_dependencies(None): ph = array_ops.placeholder( tensor.dtype, shape=tensor.get_shape(), name=name) # pylint: disable=protected-access if ops._USE_C_SHAPES: if isinstance(tensor, ops.EagerTensor): handle_data = tensor._handle_data if handle_data: handle_data = handle_data.SerializeToString() else: handle_data = c_api.GetHandleShapeAndType(tensor.graph._c_graph, tensor._as_tf_output()) if handle_data: c_api.SetHandleShapeAndType(ph.graph._c_graph, ph._as_tf_output(), compat.as_bytes(handle_data)) else: ph._handle_data = tensor._handle_data # pylint: enable=protected-access self.inputs.append(ph) self._captured[tensor] = ph self.extra_args.append(ph) if _is_guaranteed_const(tensor): with ops.control_dependencies(None): return array_ops.guarantee_const(ph) else: return ph def _add_tensor_and_parents(self, tensor): op = self._add_op_and_parents(tensor.op) return op.outputs[tensor.value_index] def _add_op_and_parents(self, op): # pylint: disable=protected-access op_def = graph_to_function_def._get_op_def(op) # pylint: enable=protected-access if op_def.is_stateful: raise ValueError("Cannot capture a stateful node (name:%s, type:%s) " "by value." % (op.name, op.type)) elif op.type in ("Placeholder", "PlaceholderV2"): raise ValueError("Cannot capture a placeholder (name:%s, type:%s) " "by value." % (op.name, op.type)) captured_inputs = [self._add_tensor_and_parents(x) for x in op.inputs] captured_op = self.create_op( op.type, captured_inputs, [o.dtype for o in op.outputs], name=op.name, attrs=op.node_def.attr, op_def=op_def) for t, captured_t in zip(op.outputs, captured_op.outputs): self._captured[t] = captured_t return captured_op def func_graph_from_py_func(func, arg_names, arg_types, name=None, capture_by_value=False, device=None, colocation_stack=None, container=None, collections_ref=None, arg_shapes=None): """Returns a _FuncGraph generated from `func`. Args: func: A Python callable which constructs a TF function body. The arguments must correspond to `arg_types`. Returns a value or list/tuple of values. No returned value can be None. arg_names: A sequence of strings for the function argument names. arg_types: A sequence of the function's argument types. name: The function name. If None, the name is derived from `func`. capture_by_value: boolean. If True, captured values will be copied into the function body. device: device name or function. colocation_stack: A colocation stack (list) the _FuncGraph should use. container: A container name the _FuncGraph should start with. collections_ref: A reference to a collections dict the _FuncGraph should use internally. arg_shapes: A sequence of the function's argument shapes. Returns: A _FuncGraph. Raises: ValueError: if func returns None. """ if not name: name = function_utils.get_func_name(func) func_graph = _FuncGraph(name, capture_by_value) with func_graph.as_default(), ops.device(device): # pylint: disable=protected-access if collections_ref is not None: func_graph._collections = collections_ref if container is not None: func_graph._container = container if colocation_stack is not None: func_graph._colocation_stack = colocation_stack # pylint: enable=protected-access if arg_shapes is None: arg_shapes = [None] * len(arg_types) # Create placeholders for the function arguments. for (argname, argtype, argshape) in zip(arg_names, arg_types, arg_shapes): argholder = array_ops.placeholder(argtype, shape=argshape, name=argname) func_graph.inputs.append(argholder) # Call func and gather the output tensors. 
with vs.variable_scope("", custom_getter=func_graph.getvar): outputs = func(*func_graph.inputs) # There is no way of distinguishing between a function not returning # anything and a function returning None in Python. # We need to allow the former and ideally want to forbid the latter as # it is most likely user error. # TODO(iga): Consider adding a @NoOutput decorator on top of @Defun to # allow users to explicitly mark the function as not returning anything. # For now, we allow a single None return and interpret it as a function # with no output. if outputs is None: outputs = [] else: # If func only returned one value, make it a tuple. if not isinstance(outputs, (list, tuple)): outputs = (outputs,) if any([_ is None for _ in outputs]): raise ValueError("Function can not return None.") # Ensures each output is a Tensor in the function graph. outputs = [ops.convert_to_tensor(t) for t in outputs] outputs = [func_graph.capture(t) if t.graph is not func_graph else t for t in outputs] func_graph.outputs = outputs return func_graph def _is_guaranteed_const(tensor): """Determines whether `tensor` is guaranteed to be a constant. A tensor is guaranteed to be a constant if either it was produced by a `GuaranteeConst` op or if all of its children are guaranteed to be constants. Args: tensor: The tensor for which to determine const-ness. Returns: True if `tensor` is guaranteed to be a constant, False otherwise. """ if isinstance(tensor, ops.EagerTensor): return False class Work(object): def __init__(self, op, leaving): self.op = op self.leaving = leaving is_guaranteed_const = lambda op: op.node_def.op == "GuaranteeConst" constants = set([]) def all_inputs_const(op): # If all inputs of an op are guaranteed constants, then we can infer that # the op produces a constant as well. return op.inputs and all(inp.op in constants for inp in op.inputs) visited = set([]) stack = [Work(tensor.op, leaving=False)] while stack: work = stack.pop() if work.leaving: if all_inputs_const(work.op): constants.add(work.op) continue visited.add(work.op) if is_guaranteed_const(work.op): constants.add(work.op) continue # This op will be revisited after all its inputs are checked for const-ness. stack.append(Work(work.op, leaving=True)) for inp in work.op.inputs: if inp.op not in visited: stack.append(Work(inp.op, leaving=False)) return tensor.op in constants def _call(sig, *inputs, **kwargs): """Adds a node calling a function. This adds a `call` op to the default graph that calls the function of signature `sig`, passing the tensors in `inputs` as arguments. It returns the outputs of the call, which are one or more tensors. `sig` is OpDefArg.a `_DefinedFunction` object. You can pass an optional keyword parameter `name=string` to name the added operation. You can pass an optional keyword parameter `noinline=True|False` to instruct the runtime not to inline the function body into the call site. Args: sig: OpDefArg. The signature of the function. *inputs: arguments to the function. **kwargs: Optional keyword arguments. Can only contain 'name' or 'noinline'. Returns: A 2-element tuple. First element: a Tensor if the function returns a single value; a list of Tensors if the function returns multiple value; the Operation if the function returns no values. Second element: the Operation. Raises: ValueError: if the arguments are invalid. 
""" if len(inputs) != len(sig.input_arg): raise ValueError("Expected number of arguments: %d, received: %d" % (len( sig.input_arg), len(inputs))) name = kwargs.pop("name", None) g = ops.get_default_graph() func_name = sig.name attrs = _parse_kwargs_as_attrs(func_name, **kwargs) output_types = [dtypes.DType(x.type) for x in sig.output_arg] with ops.name_scope(name, func_name, inputs) as name: op = g.create_op( func_name, list(inputs), output_types, name=name, attrs=attrs, op_def=sig, compute_shapes=False) if op.outputs: if len(op.outputs) == 1: ret = op.outputs[0] else: ret = tuple(op.outputs) else: ret = op return ret, op def _from_definition(fdef, grad_func=None): """Creates a _DefinedFunction initialized from a FunctionDef proto. Args: fdef: a FunctionDef grad_func: a _DefinedFunction or None Returns: A _DefinedFunction representing fdef """ # TODO(iga): This method does major surgery on _DefinedFunction. # Make it a named constructor using @classmethod of _DefinedFunction. # The Python callable is only needed to create a FunctionDef. Since we have # the FunctionDef here, we don't need to set _DefinedFunction._func (nor do we # have access to such a callable here). func = None argnames = [arg.name for arg in fdef.signature.input_arg] input_types = tuple( dtypes.as_dtype(arg.type) for arg in fdef.signature.input_arg) func_name = fdef.signature.name # Note: FunctionDefs do not include python gradient functions, so if the # original _DefinedFunction included one it will not be reflected here. python_grad_func = None out_names = [arg.name for arg in fdef.signature.output_arg] result = _DefinedFunction(func, argnames, input_types, func_name, grad_func, python_grad_func, out_names) # pylint: disable=protected-access serialized = fdef.SerializeToString() c_func = c_api.TF_FunctionImportFunctionDef(serialized) result._c_func = c_api_util.ScopedTFFunction(c_func) result._extra_inputs = [] # pylint: enable=protected-access return result def _from_library(lib): """Creates _DefinedFunctions initialized from a FunctionDefLibrary proto. This method handles assigning the correct gradient functions to each function. 
Args: lib: a FunctionDefLibrary Returns: A list of _DefinedFunctions Raises: ValueError: `lib` is invalid """ if not lib.function and not lib.gradient: return [] # function name -> FunctionDef proto funcs = {fdef.signature.name: fdef for fdef in lib.function} # Validate that all references function names have function defs for g in lib.gradient: if g.function_name not in funcs: raise ValueError("FunctionDefLibrary missing '%s' FunctionDef\n%s" % (g.function_name, str(lib))) if g.gradient_func not in funcs: raise ValueError("FunctionDefLibrary missing '%s' FunctionDef\n%s" % (g.gradient_func, str(lib))) # function name -> gradient function name func_to_grad = collections.defaultdict(lambda: None) # gradient function name -> names of functions having that grad function grad_to_funcs = collections.defaultdict(list) for gdef in lib.gradient: func_to_grad[gdef.function_name] = gdef.gradient_func grad_to_funcs[gdef.gradient_func].append(gdef.function_name) # Start with functions without gradients ready = [ fdef for fdef in lib.function if func_to_grad[fdef.signature.name] is None ] if not ready: raise ValueError( "FunctionDefLibrary contains cyclic gradient functions!\n" + str(lib)) # function name -> _DefinedFunction initialized = {} while ready: fdef = ready.pop() name = fdef.signature.name grad = initialized.get(func_to_grad[name]) if func_to_grad[name]: assert grad defined_func = _from_definition(fdef, grad_func=grad) initialized[name] = defined_func ready.extend(funcs[f] for f in grad_to_funcs[name]) return initialized.values() def _get_experimental_kwarg_as_attr(attr_name, value): """Creates an AttrValue for a python object.""" if isinstance(value, bool): return attr_value_pb2.AttrValue(b=value) elif isinstance(value, int): return attr_value_pb2.AttrValue(i=value) elif isinstance(value, float): return attr_value_pb2.AttrValue(f=value) elif isinstance(value, str): return attr_value_pb2.AttrValue(s=compat.as_bytes(value)) else: raise ValueError("Unsupported attribute type for %s with type %s" % (attr_name, type(value))) def _parse_kwargs_as_attrs(func_name, **kwargs): """Parses **kwargs into a node's attributes.""" attrs = {} noinline = kwargs.pop("noinline", None) if noinline is not None: attrs["_noinline"] = attr_value_pb2.AttrValue(b=bool(noinline)) compiled = kwargs.pop("compiled", None) separate_compiled_gradients = kwargs.pop("separate_compiled_gradients", None) if compiled is not None: attrs["_XlaCompile"] = attr_value_pb2.AttrValue(b=bool(compiled)) attrs["_XlaSeparateCompiledGradients"] = attr_value_pb2.AttrValue( b=bool(separate_compiled_gradients)) # Forward _XlaScope from enclosing context (if set), otherwise create new. # pylint: disable=protected-access if "_XlaScope" in ops.get_default_graph()._attr_scope_map: attrs["_XlaScope"] = ops.get_default_graph()._attr_scope_map["_XlaScope"] else: attrs["_XlaScope"] = attr_value_pb2.AttrValue( s=("function_%s" % func_name).encode()) # pylint: enable=protected-access kwargs_keys = list(kwargs.keys()) for key in kwargs_keys: if key.startswith("experimental_"): attrs[key] = _get_experimental_kwarg_as_attr(key, kwargs[key]) del kwargs[key] if kwargs: raise ValueError("Unknown keyword arguments: %s" % kwargs.keys()) return attrs def get_extra_vars(): """Returns the captured variables by the function. Returns: If the default graph is being used to define a function, the returned list of variables are those created inside the function body so far. Otherwise, returns an empty list. 
""" g = ops.get_default_graph() if isinstance(g, _FuncGraph): return g.extra_vars else: return [] def get_extra_inputs(): """Returns the captured input tensors by the function. Returns: If the default graph is being used to define a function, the returned list of tensors are those accessed inside the function body but defined outside the function body so far. Otherwise, returns an empty list. """ g = ops.get_default_graph() if isinstance(g, _FuncGraph): return g.extra_inputs else: return [] def get_extra_args(): """Returns the corresponding function arguments for the captured inputs. Returns: If the default graph is being used to define a function, the returned list of place holders are those used inside the function body corresponding those returned by get_extra_inputs(). Otherwise, returns an empty list. """ g = ops.get_default_graph() if isinstance(g, _FuncGraph): return g.extra_args else: return [] def _type_list_to_str(types): if any([_ not in _DTYPE_TO_STR for _ in types]): raise ValueError("Unsupported dtypes: %s" % types) return "".join([_DTYPE_TO_STR[_] for _ in types]) # NOTE: The list needs to be extended when more data types are added. _DTYPE_TO_STR = { dtypes.float16: "f16", dtypes.float32: "f32", dtypes.float64: "f64", dtypes.int32: "i32", dtypes.uint8: "i8", dtypes.uint16: "u16", dtypes.uint32: "u32", dtypes.uint64: "u64", dtypes.int16: "i16", dtypes.int8: "i8", dtypes.string: "s", dtypes.complex64: "c64", dtypes.complex128: "c128", dtypes.int64: "i64", dtypes.bool: "b", dtypes.qint8: "qi8", dtypes.quint8: "qu8", dtypes.qint16: "qi16", dtypes.quint16: "qu16", dtypes.qint32: "qi32", dtypes.bfloat16: "b16" } def function_def_from_tf_function(c_func): """Converts a SWIG-wrapped TF_Function* to a FunctionDef proto.""" with c_api_util.tf_buffer() as buf: c_api.TF_FunctionToFunctionDef(c_func, buf) data = c_api.TF_GetBuffer(buf) fdef = function_pb2.FunctionDef() fdef.ParseFromString(compat.as_bytes(data)) return fdef<|fim▁end|>
<|file_name|>gsi.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ This module returns stats about the DynamoDB table """ import math from datetime import datetime, timedelta from boto.exception import JSONResponseError, BotoServerError from retrying import retry from dynamic_dynamodb.aws import dynamodb from dynamic_dynamodb.log_handler import LOGGER as logger from dynamic_dynamodb.aws.cloudwatch import ( __get_connection_cloudwatch as cloudwatch_connection) def get_consumed_read_units_percent( table_name, gsi_name, lookback_window_start=15): """ Returns the number of consumed read units in percent :type table_name: str :param table_name: Name of the DynamoDB table :type gsi_name: str :param gsi_name: Name of the GSI :type lookback_window_start: int :param lookback_window_start: How many seconds to look at :returns: int -- Number of consumed reads """ try: metrics = __get_aws_metric( table_name, gsi_name, lookback_window_start, 'ConsumedReadCapacityUnits') except BotoServerError: raise if metrics: consumed_read_units = int( math.ceil(float(metrics[0]['Sum'])/float(300))) else: consumed_read_units = 0 try: consumed_read_units_percent = int( math.ceil( float(consumed_read_units) / float(dynamodb.get_provisioned_gsi_read_units( table_name, gsi_name)) * 100)) except JSONResponseError: raise logger.info('{0} - GSI: {1} - Consumed read units: {2:d}%'.format( table_name, gsi_name, consumed_read_units_percent)) return consumed_read_units_percent def get_throttled_read_event_count( table_name, gsi_name, lookback_window_start=15): """ Returns the number of throttled read events during a given time frame :type table_name: str :param table_name: Name of the DynamoDB table :type gsi_name: str :param gsi_name: Name of the GSI :type lookback_window_start: int :param lookback_window_start: How many seconds to look at :returns: int -- Number of throttled read events """ try: metrics = __get_aws_metric( table_name, gsi_name, lookback_window_start, 'ReadThrottleEvents') except BotoServerError: raise if metrics: throttled_read_events = int(metrics[0]['Sum']) else: throttled_read_events = 0 logger.info('{0} - GSI: {1} - Read throttle count: {2:d}'.format( table_name, gsi_name, throttled_read_events)) return throttled_read_events def get_consumed_write_units_percent( table_name, gsi_name, lookback_window_start=15): """ Returns the number of consumed write units in percent :type table_name: str :param table_name: Name of the DynamoDB table :type gsi_name: str :param gsi_name: Name of the GSI :type lookback_window_start: int :param lookback_window_start: How many seconds to look at :returns: int -- Number of consumed writes """ try: metrics = __get_aws_metric( table_name, gsi_name, lookback_window_start, 'ConsumedWriteCapacityUnits') except BotoServerError: raise if metrics: consumed_write_units = int( math.ceil(float(metrics[0]['Sum'])/float(300))) else: consumed_write_units = 0 try: consumed_write_units_percent = int( math.ceil( float(consumed_write_units) / float(dynamodb.get_provisioned_gsi_write_units( table_name, gsi_name)) * 100)) except JSONResponseError: raise logger.info('{0} - GSI: {1} - Consumed write units: {2:d}%'.format( table_name, gsi_name, consumed_write_units_percent)) return consumed_write_units_percent def get_throttled_write_event_count( table_name, gsi_name, lookback_window_start=15): """ Returns the number of throttled write events during a given time frame :type table_name: str :param table_name: Name of the DynamoDB table :type gsi_name: str :param gsi_name: Name of the GSI 
:type lookback_window_start: int :param lookback_window_start: How many seconds to look at :returns: int -- Number of throttled write events """ try: metrics = __get_aws_metric(<|fim▁hole|> if metrics: throttled_write_events = int(metrics[0]['Sum']) else: throttled_write_events = 0 logger.info('{0} - GSI: {1} - Write throttle count: {2:d}'.format( table_name, gsi_name, throttled_write_events)) return throttled_write_events @retry( wait='exponential_sleep', wait_exponential_multiplier=1000, wait_exponential_max=10000, stop_max_attempt_number=10) def __get_aws_metric(table_name, gsi_name, lookback_window_start, metric_name): """ Returns a metric list from the AWS CloudWatch service, may return None if no metric exists :type table_name: str :param table_name: Name of the DynamoDB table :type gsi_name: str :param gsi_name: Name of a GSI on the given DynamoDB table :type lookback_window_start: int :param lookback_window_start: How many minutes to look at :type metric_name str :param metric_name Name of the metric to retrieve from CloudWatch :returns: list -- A list of time series data for the given metric, may be None if there was no data """ try: now = datetime.utcnow() start_time = now-timedelta(minutes=lookback_window_start) end_time = now-timedelta(minutes=lookback_window_start-5) return cloudwatch_connection().get_metric_statistics( period=300, # Always look at 5 minutes windows start_time=start_time, end_time=end_time, metric_name=metric_name, namespace='AWS/DynamoDB', statistics=['Sum'], dimensions={ 'TableName': table_name, 'GlobalSecondaryIndexName': gsi_name }, unit='Count') except BotoServerError as error: logger.error( 'Unknown boto error. Status: "{0}". ' 'Reason: "{1}". Message: {2}'.format( error.status, error.reason, error.message)) raise<|fim▁end|>
table_name, gsi_name, lookback_window_start, 'WriteThrottleEvents') except BotoServerError: raise
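The gsi.py row above repeats one calculation in every metric helper: a CloudWatch Sum taken over a 300-second window is converted to a per-second rate and then to a percentage of the provisioned throughput. A minimal standalone sketch of that arithmetic, not part of dynamic-dynamodb; the function name and example numbers are hypothetical.

import math

def consumed_percent(window_sum, provisioned_units, window_seconds=300):
    # Per-second rate over the 5-minute window, rounded up as in the sample.
    per_second = int(math.ceil(float(window_sum) / float(window_seconds)))
    # Percentage of provisioned capacity, again rounded up.
    return int(math.ceil(float(per_second) / float(provisioned_units) * 100))

if __name__ == "__main__":
    # 15000 read units consumed in 5 minutes is 50 units/s, i.e. 50% of 100 provisioned units.
    print(consumed_percent(15000, 100))  # -> 50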
<|file_name|>illumos.rs<|end_file_name|><|fim▁begin|>s! { pub struct shmid_ds { pub shm_perm: ::ipc_perm, pub shm_segsz: ::size_t, pub shm_amp: *mut ::c_void,<|fim▁hole|> pub shm_nattch: ::shmatt_t, pub shm_cnattch: ::c_ulong, pub shm_atime: ::time_t, pub shm_dtime: ::time_t, pub shm_ctime: ::time_t, pub shm_pad4: [i64; 4], } } pub const AF_LOCAL: ::c_int = 1; // AF_UNIX pub const AF_FILE: ::c_int = 1; // AF_UNIX pub const EFD_SEMAPHORE: ::c_int = 0x1; pub const EFD_NONBLOCK: ::c_int = 0x800; pub const EFD_CLOEXEC: ::c_int = 0x80000; pub const TCP_KEEPIDLE: ::c_int = 34; pub const TCP_KEEPCNT: ::c_int = 35; pub const TCP_KEEPINTVL: ::c_int = 36; pub const TCP_CONGESTION: ::c_int = 37; pub const F_OFD_GETLK: ::c_int = 50; pub const F_OFD_SETLKL: ::c_int = 51; pub const F_OFD_SETLKW: ::c_int = 52; pub const F_FLOCK: ::c_int = 55; pub const F_FLOCKW: ::c_int = 56; extern "C" { pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int; pub fn mincore(addr: ::caddr_t, len: ::size_t, vec: *mut ::c_char) -> ::c_int; pub fn pset_bind_lwp( pset: ::psetid_t, id: ::id_t, pid: ::pid_t, opset: *mut ::psetid_t, ) -> ::c_int; pub fn pset_getloadavg(pset: ::psetid_t, load: *mut ::c_double, num: ::c_int) -> ::c_int; pub fn preadv(fd: ::c_int, iov: *const ::iovec, iovcnt: ::c_int, offset: ::off_t) -> ::ssize_t; pub fn pwritev(fd: ::c_int, iov: *const ::iovec, iovcnt: ::c_int, offset: ::off_t) -> ::ssize_t; }<|fim▁end|>
pub shm_lkcnt: ::c_ushort, pub shm_lpid: ::pid_t, pub shm_cpid: ::pid_t,
<|file_name|>monitor_system.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- from entropyfw import System from s_pico_tc08.module import EntropyPicoTc08 from s_tti_cpx.module import EntropyTTiCPX from s_laird_optotec_ot15.module import EntropyLairdOT15ConstantQc from .s_controller.module import EntropyController as GFAEntropyController from s_eventlogger.module import EntropyEventLogger from . import config from . import system_names __author__ = 'otger' class SystemMonitorGFAThermal(System): def __init__(self, flask_app): System.__init__(self, flask_app) self.pico = EntropyPicoTc08(name=system_names.TC08_MOD, channels=[]) self.add_module(self.pico)<|fim▁hole|> # self.add_module(self.tticpx) self.elogger = EntropyEventLogger(name=system_names.LOGGER_MOD, backup_path='/tmp') self.add_module(self.elogger) def enable_tc08_channel(self, channel, tc_type, units): self.pico.enable(channel=channel, tc_type=tc_type, units=units)<|fim▁end|>
# self.tticpx = EntropyTTiCPX(name=system_names.TTiCPX_MOD)
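The monitor_system.py row above composes hardware driver modules into a System subclass: each module is registered through add_module and thin methods such as enable_tc08_channel forward to the owned module. A dependency-free sketch of that composition pattern; the classes and argument values below are stand-ins, not the entropyfw or TC-08 API.

class TinyModule(object):
    def __init__(self, name):
        self.name = name
        self.enabled_channels = []
    def enable(self, channel, tc_type, units):
        self.enabled_channels.append((channel, tc_type, units))

class TinySystem(object):
    def __init__(self):
        self.modules = []
    def add_module(self, module):
        self.modules.append(module)

class TinyMonitor(TinySystem):
    def __init__(self):
        TinySystem.__init__(self)
        self.pico = TinyModule("tc08")
        self.add_module(self.pico)
    def enable_tc08_channel(self, channel, tc_type, units):
        # Forward to the owned module, mirroring the sample's passthrough method.
        self.pico.enable(channel=channel, tc_type=tc_type, units=units)

if __name__ == "__main__":
    m = TinyMonitor()
    m.enable_tc08_channel(1, "K", "degC")  # illustrative arguments only
    print([mod.name for mod in m.modules], m.pico.enabled_channels)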
<|file_name|>msid.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `MSID[%s]` reader"] pub struct R(crate::R<MSID_SPEC>); impl core::ops::Deref for R { type Target = crate::R<MSID_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<MSID_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<MSID_SPEC>) -> Self { R(reader) } } #[doc = "Field `INDEX` reader - Message Pending Index"] pub struct INDEX_R(crate::FieldReader<u8, u8>); impl INDEX_R { pub(crate) fn new(bits: u8) -> Self { INDEX_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for INDEX_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 }<|fim▁hole|>} impl R { #[doc = "Bits 0:5 - Message Pending Index"] #[inline(always)] pub fn index(&self) -> INDEX_R { INDEX_R::new((self.bits & 0x3f) as u8) } } #[doc = "Message Index Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [msid](index.html) module"] pub struct MSID_SPEC; impl crate::RegisterSpec for MSID_SPEC { type Ux = u32; } #[doc = "`read()` method returns [msid::R](R) reader structure"] impl crate::Readable for MSID_SPEC { type Reader = R; } #[doc = "`reset()` method sets MSID[%s] to value 0x20"] impl crate::Resettable for MSID_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0x20 } }<|fim▁end|>
<|file_name|>showdevices.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 """Run 'adb devices' and show results in friendly way. Runs 'adb devices' and integrates the results with environment variables DEVTAGS and ANDROID_SERIAL to show model numbers for connected devices. """ import getopt import os import re import sys import script_utils as u valid_dispositions = {"device": 1, "unauthorized": 1} flag_showall = False def read_devtags(): """Read and post-process DEVTAGS environment var.""" dt = os.getenv("DEVTAGS") chunks = dt.split(" ") sertotag = {} tagtoser = {} for chunk in chunks: (tag, ser) = chunk.split(":") if ser in sertotag: u.error("malformed DEVTAGS (more than one " "entry for serial number %s" % ser) if tag in tagtoser: u.warning("malformed DEVTAGS (more than one " "serial number for tag %s" % tag) sertotag[ser] = tag tagtoser[tag] = ser return (sertotag, tagtoser) def perform(): """Main driver routine.""" andser = os.getenv("ANDROID_SERIAL") if andser: andser = andser.strip() else: andser = "" (serial_to_tag, tag_to_serial) = read_devtags() lines = u.docmdlines("adb devices") rxd1 = re.compile(r"^\* daemon not running.+$") rxd2 = re.compile(r"^\* daemon started.+$") rx1 = re.compile(r"^\s*(\S+)\s+(\S+)\s*$") devices_found = {} for line in lines[1:]: if rxd1.match(line) or rxd2.match(line): continue m = rx1.match(line) if not m: u.warning("unable to match adb output line: %s" % line) continue ser = m.group(1) disp = m.group(2) if disp not in valid_dispositions: u.warning("unknown device disposition %s in adb " "output line: %s" % (disp, line)) sel = "" if ser == andser: sel = ">>" if ser not in serial_to_tag: tag = "???" else: tag = serial_to_tag[ser] devices_found[tag] = 1 print("%2s %8s %16s %s" % (sel, tag, ser, disp)) if flag_showall: for tag, ser in tag_to_serial.items(): if tag in devices_found: continue print("%2s %8s %16s %s" % ("", tag, ser, "<unconnected>")) def usage(msgarg=None): """Print usage and exit.""" if msgarg: sys.stderr.write("error: %s\n" % msgarg) print("""\ usage: %s [options] options: -d increase debug msg verbosity level -a show disposition for all devices, not just those connected """ % os.path.basename(sys.argv[0])) sys.exit(1) def parse_args(): """Command line argument parsing.""" global flag_showall try: optlist, _ = getopt.getopt(sys.argv[1:], "da") except getopt.GetoptError as err: # unrecognized option usage(str(err)) for opt, _ in optlist: if opt == "-d": u.increment_verbosity() elif opt == "-a": flag_showall = True <|fim▁hole|>parse_args() # Check to make sure we can run adb u.doscmd("which adb") # run perform() # done exit(0)<|fim▁end|>
# ---------main portion of script ------------- u.setdeflanglocale()
<|file_name|>checker.go<|end_file_name|><|fim▁begin|>// Copyright 2017 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. package ranger import ( "github.com/pingcap/tidb/ast" "github.com/pingcap/tidb/expression" "github.com/pingcap/tidb/model" "github.com/pingcap/tidb/types" ) // conditionChecker checks if this condition can be pushed to index plan. type conditionChecker struct { colName model.CIStr shouldReserve bool // check if a access condition should be reserved in filter conditions. length int } func (c *conditionChecker) check(condition expression.Expression) bool { switch x := condition.(type) { case *expression.ScalarFunction: return c.checkScalarFunction(x) case *expression.Column: return c.checkColumn(x) case *expression.Constant: return true } return false } func (c *conditionChecker) checkScalarFunction(scalar *expression.ScalarFunction) bool { switch scalar.FuncName.L { case ast.LogicOr, ast.LogicAnd: return c.check(scalar.GetArgs()[0]) && c.check(scalar.GetArgs()[1]) case ast.EQ, ast.NE, ast.GE, ast.GT, ast.LE, ast.LT: if _, ok := scalar.GetArgs()[0].(*expression.Constant); ok { if c.checkColumn(scalar.GetArgs()[1]) { return scalar.FuncName.L != ast.NE || c.length == types.UnspecifiedLength } } if _, ok := scalar.GetArgs()[1].(*expression.Constant); ok { if c.checkColumn(scalar.GetArgs()[0]) { return scalar.FuncName.L != ast.NE || c.length == types.UnspecifiedLength } } case ast.IsNull, ast.IsTruth, ast.IsFalsity: return c.checkColumn(scalar.GetArgs()[0]) case ast.UnaryNot: // TODO: support "not like" convert to access conditions. if s, ok := scalar.GetArgs()[0].(*expression.ScalarFunction); ok { if s.FuncName.L == ast.Like { return false } } else { // "not column" or "not constant" can't lead to a range. return false } return c.check(scalar.GetArgs()[0]) case ast.In: if !c.checkColumn(scalar.GetArgs()[0]) {<|fim▁hole|> for _, v := range scalar.GetArgs()[1:] { if _, ok := v.(*expression.Constant); !ok { return false } } return true case ast.Like: return c.checkLikeFunc(scalar) case ast.GetParam: return true } return false } func (c *conditionChecker) checkLikeFunc(scalar *expression.ScalarFunction) bool { if !c.checkColumn(scalar.GetArgs()[0]) { return false } pattern, ok := scalar.GetArgs()[1].(*expression.Constant) if !ok { return false } if pattern.Value.IsNull() { return false } patternStr, err := pattern.Value.ToString() if err != nil { return false } if len(patternStr) == 0 { return true } escape := byte(scalar.GetArgs()[2].(*expression.Constant).Value.GetInt64()) for i := 0; i < len(patternStr); i++ { if patternStr[i] == escape { i++ if i < len(patternStr)-1 { continue } break } if i == 0 && (patternStr[i] == '%' || patternStr[i] == '_') { return false } if patternStr[i] == '%' { if i != len(patternStr)-1 { c.shouldReserve = true } break } if patternStr[i] == '_' { c.shouldReserve = true break } } return true } func (c *conditionChecker) checkColumn(expr expression.Expression) bool { col, ok := expr.(*expression.Column) if !ok { return false } if c.colName.L != "" { return c.colName.L == col.ColName.L } return true }<|fim▁end|>
return false }
<|file_name|>primitives.py<|end_file_name|><|fim▁begin|>import random import struct ######################################################################################################################## class base_primitive (object): ''' The primitive base class implements common functionality shared across most primitives. ''' def __init__ (self): self.fuzz_complete = False # this flag is raised when the mutations are exhausted. self.fuzz_library = [] # library of static fuzz heuristics to cycle through. self.fuzzable = True # flag controlling whether or not the given primitive is to be fuzzed. self.mutant_index = 0 # current mutation index into the fuzz library. self.original_value = None # original value of primitive. self.rendered = "" # rendered value of primitive. self.value = None # current value of primitive. def exhaust (self): ''' Exhaust the possible mutations for this primitive. @rtype: Integer @return: The number of mutations to reach exhaustion ''' num = self.num_mutations() - self.mutant_index self.fuzz_complete = True self.mutant_index = self.num_mutations() self.value = self.original_value return num def mutate (self): ''' Mutate the primitive by stepping through the fuzz library, return False on completion. @rtype: Boolean @return: True on success, False otherwise. ''' # if we've ran out of mutations, raise the completion flag. if self.mutant_index == self.num_mutations(): self.fuzz_complete = True # if fuzzing was disabled or complete, and mutate() is called, ensure the original value is restored. if not self.fuzzable or self.fuzz_complete: self.value = self.original_value return False # update the current value from the fuzz library. self.value = self.fuzz_library[self.mutant_index] # increment the mutation count. self.mutant_index += 1 return True def num_mutations (self): ''' Calculate and return the total number of mutations for this individual primitive. @rtype: Integer @return: Number of mutated forms this primitive can take ''' return len(self.fuzz_library) def render (self): ''' Nothing fancy on render, simply return the value. ''' self.rendered = self.value return self.rendered def reset (self): ''' Reset this primitive to the starting mutation state. ''' self.fuzz_complete = False self.mutant_index = 0 self.value = self.original_value ######################################################################################################################## class delim (base_primitive): def __init__ (self, value, fuzzable=True, name=None): ''' Represent a delimiter such as :,\r,\n, ,=,>,< etc... Mutations include repetition, substitution and exclusion. @type value: Character @param value: Original value @type fuzzable: Boolean @param fuzzable: (Optional, def=True) Enable/disable fuzzing of this primitive @type name: String @param name: (Optional, def=None) Specifying a name gives you direct access to a primitive ''' self.value = self.original_value = value self.fuzzable = fuzzable self.name = name self.s_type = "delim" # for ease of object identification self.rendered = "" # rendered value self.fuzz_complete = False # flag if this primitive has been completely fuzzed self.fuzz_library = [] # library of fuzz heuristics self.mutant_index = 0 # current mutation number # # build the library of fuzz heuristics. # # if the default delim is not blank, repeat it a bunch of times. 
if self.value: self.fuzz_library.append(self.value * 2) self.fuzz_library.append(self.value * 5) self.fuzz_library.append(self.value * 10) self.fuzz_library.append(self.value * 25) self.fuzz_library.append(self.value * 100) self.fuzz_library.append(self.value * 500) self.fuzz_library.append(self.value * 1000) # try ommitting the delimiter. self.fuzz_library.append("") # if the delimiter is a space, try throwing out some tabs. if self.value == " ": self.fuzz_library.append("\t") self.fuzz_library.append("\t" * 2) self.fuzz_library.append("\t" * 100) # toss in some other common delimiters: self.fuzz_library.append(" ") self.fuzz_library.append("\t") self.fuzz_library.append("\t " * 100) self.fuzz_library.append("\t\r\n" * 100) self.fuzz_library.append("!") self.fuzz_library.append("@") self.fuzz_library.append("#") self.fuzz_library.append("$") self.fuzz_library.append("%") self.fuzz_library.append("^") self.fuzz_library.append("&") self.fuzz_library.append("*") self.fuzz_library.append("(") self.fuzz_library.append(")") self.fuzz_library.append("-") self.fuzz_library.append("_") self.fuzz_library.append("+") self.fuzz_library.append("=") self.fuzz_library.append(":") self.fuzz_library.append(": " * 100) self.fuzz_library.append(":7" * 100) self.fuzz_library.append(";") self.fuzz_library.append("'") self.fuzz_library.append("\"") self.fuzz_library.append("/") self.fuzz_library.append("\\") self.fuzz_library.append("?") self.fuzz_library.append("<") self.fuzz_library.append(">") self.fuzz_library.append(".") self.fuzz_library.append(",") self.fuzz_library.append("\r") self.fuzz_library.append("\n") self.fuzz_library.append("\r\n" * 64) self.fuzz_library.append("\r\n" * 128) self.fuzz_library.append("\r\n" * 512) ######################################################################################################################## class group (base_primitive): def __init__ (self, name, values): ''' This primitive represents a list of static values, stepping through each one on mutation. You can tie a block to a group primitive to specify that the block should cycle through all possible mutations for *each* value within the group. The group primitive is useful for example for representing a list of valid opcodes. @type name: String @param name: Name of group @type values: List or raw data @param values: List of possible raw values this group can take. ''' self.name = name self.values = values self.fuzzable = True self.s_type = "group" self.value = self.values[0] self.original_value = self.values[0] self.rendered = "" self.fuzz_complete = False self.mutant_index = 0 # sanity check that values list only contains strings (or raw data) if self.values != []: for val in self.values: assert type(val) is str, "Value list may only contain strings or raw data" def mutate (self): ''' Move to the next item in the values list. @rtype: False @return: False ''' if self.mutant_index == self.num_mutations(): self.fuzz_complete = True # if fuzzing was disabled or complete, and mutate() is called, ensure the original value is restored. if not self.fuzzable or self.fuzz_complete: self.value = self.values[0] return False # step through the value list. self.value = self.values[self.mutant_index] # increment the mutation count. self.mutant_index += 1 return True def num_mutations (self): ''' Number of values in this primitive. @rtype: Integer @return: Number of values in this primitive. 
''' return len(self.values) ######################################################################################################################## class random_data (base_primitive): def __init__ (self, value, min_length, max_length, max_mutations=25, fuzzable=True, step=None, name=None): ''' Generate a random chunk of data while maintaining a copy of the original. A random length range can be specified. For a static length, set min/max length to be the same. @type value: Raw @param value: Original value @type min_length: Integer @param min_length: Minimum length of random block @type max_length: Integer @param max_length: Maximum length of random block @type max_mutations: Integer @param max_mutations: (Optional, def=25) Number of mutations to make before reverting to default @type fuzzable: Boolean @param fuzzable: (Optional, def=True) Enable/disable fuzzing of this primitive @type step: Integer @param step: (Optional, def=None) If not null, step count between min and max reps, otherwise random @type name: String @param name: (Optional, def=None) Specifying a name gives you direct access to a primitive ''' self.value = self.original_value = str(value) self.min_length = min_length self.max_length = max_length self.max_mutations = max_mutations self.fuzzable = fuzzable self.step = step self.name = name self.s_type = "random_data" # for ease of object identification self.rendered = "" # rendered value self.fuzz_complete = False # flag if this primitive has been completely fuzzed self.mutant_index = 0 # current mutation number if self.step: self.max_mutations = (self.max_length - self.min_length) / self.step + 1 def mutate (self): ''' Mutate the primitive value returning False on completion. @rtype: Boolean @return: True on success, False otherwise. ''' # if we've ran out of mutations, raise the completion flag. if self.mutant_index == self.num_mutations(): self.fuzz_complete = True # if fuzzing was disabled or complete, and mutate() is called, ensure the original value is restored. if not self.fuzzable or self.fuzz_complete: self.value = self.original_value return False # select a random length for this string. if not self.step: length = random.randint(self.min_length, self.max_length) # select a length function of the mutant index and the step. else: length = self.min_length + self.mutant_index * self.step # reset the value and generate a random string of the determined length. self.value = "" for i in xrange(length): self.value += chr(random.randint(0, 255)) # increment the mutation count. self.mutant_index += 1 return True def num_mutations (self): ''' Calculate and return the total number of mutations for this individual primitive. @rtype: Integer @return: Number of mutated forms this primitive can take ''' return self.max_mutations ######################################################################################################################## class static (base_primitive): def __init__ (self, value, name=None): ''' Primitive that contains static content. @type value: Raw @param value: Raw static data @type name: String @param name: (Optional, def=None) Specifying a name gives you direct access to a primitive ''' self.value = self.original_value = value self.name = name self.fuzzable = False # every primitive needs this attribute. self.mutant_index = 0 self.s_type = "static" # for ease of object identification self.rendered = "" self.fuzz_complete = True def mutate (self): ''' Do nothing. @rtype: False @return: False ''' return False def num_mutations (self): ''' Return 0. 
<|fim▁hole|> return 0 ######################################################################################################################## class string (base_primitive): # store fuzz_library as a class variable to avoid copying the ~70MB structure across each instantiated primitive. fuzz_library = [] def __init__ (self, value, size=-1, padding="\x00", encoding="ascii", fuzzable=True, max_len=0, name=None): ''' Primitive that cycles through a library of "bad" strings. The class variable 'fuzz_library' contains a list of smart fuzz values global across all instances. The 'this_library' variable contains fuzz values specific to the instantiated primitive. This allows us to avoid copying the near ~70MB fuzz_library data structure across each instantiated primitive. @type value: String @param value: Default string value @type size: Integer @param size: (Optional, def=-1) Static size of this field, leave -1 for dynamic. @type padding: Character @param padding: (Optional, def="\\x00") Value to use as padding to fill static field size. @type encoding: String @param encoding: (Optonal, def="ascii") String encoding, ex: utf_16_le for Microsoft Unicode. @type fuzzable: Boolean @param fuzzable: (Optional, def=True) Enable/disable fuzzing of this primitive @type max_len: Integer @param max_len: (Optional, def=0) Maximum string length @type name: String @param name: (Optional, def=None) Specifying a name gives you direct access to a primitive ''' self.value = self.original_value = value self.size = size self.padding = padding self.encoding = encoding self.fuzzable = fuzzable self.name = name self.s_type = "string" # for ease of object identification self.rendered = "" # rendered value self.fuzz_complete = False # flag if this primitive has been completely fuzzed self.mutant_index = 0 # current mutation number # add this specific primitives repitition values to the unique fuzz library. self.this_library = \ [ self.value * 2, self.value * 10, self.value * 100, # UTF-8 self.value * 2 + "\xfe", self.value * 10 + "\xfe", self.value * 100 + "\xfe", ] # if the fuzz library has not yet been initialized, do so with all the global values. if not self.fuzz_library: string.fuzz_library = \ [ # omission. "", # strings ripped from spike (and some others I added) "/.:/" + "A"*5000 + "\x00\x00", "/.../" + "A"*5000 + "\x00\x00", "/.../.../.../.../.../.../.../.../.../.../", "/../../../../../../../../../../../../etc/passwd", "/../../../../../../../../../../../../boot.ini", "..:..:..:..:..:..:..:..:..:..:..:..:..:", "\\\\*", "\\\\?\\", "/\\" * 5000, "/." * 5000, "!@#$%%^#$%#$@#$%$$@#$%^^**(()", "%01%02%03%04%0a%0d%0aADSF", "%01%02%03@%04%0a%0d%0aADSF", "/%00/", "%00/", "%00", "%u0000", "%\xfe\xf0%\x00\xff", "%\xfe\xf0%\x01\xff" * 20, # format strings. "%n" * 100, "%n" * 500, "\"%n\"" * 500, "%s" * 100, "%s" * 500, "\"%s\"" * 500, # command injection. "|touch /tmp/SULLEY", ";touch /tmp/SULLEY;", "|notepad", ";notepad;", "\nnotepad\n", # SQL injection. "1;SELECT%20*", "'sqlattempt1", "(sqlattempt2)", "OR%201=1", # some binary strings. "\xde\xad\xbe\xef", "\xde\xad\xbe\xef" * 10, "\xde\xad\xbe\xef" * 100, "\xde\xad\xbe\xef" * 1000, "\xde\xad\xbe\xef" * 10000, "\x00" * 1000, # miscellaneous. "\r\n" * 100, "<>" * 500, # sendmail crackaddr (http://lsd-pl.net/other/sendmail.txt) ] # add some long strings. 
self.add_long_strings("A") self.add_long_strings("B") self.add_long_strings("1") self.add_long_strings("2") self.add_long_strings("3") self.add_long_strings("<") self.add_long_strings(">") self.add_long_strings("'") self.add_long_strings("\"") self.add_long_strings("/") self.add_long_strings("\\") self.add_long_strings("?") self.add_long_strings("=") self.add_long_strings("a=") self.add_long_strings("&") self.add_long_strings(".") self.add_long_strings(",") self.add_long_strings("(") self.add_long_strings(")") self.add_long_strings("]") self.add_long_strings("[") self.add_long_strings("%") self.add_long_strings("*") self.add_long_strings("-") self.add_long_strings("+") self.add_long_strings("{") self.add_long_strings("}") self.add_long_strings("\x14") self.add_long_strings("\xFE") # expands to 4 characters under utf16 self.add_long_strings("\xFF") # expands to 4 characters under utf16 # add some long strings with null bytes thrown in the middle of it. for length in [128, 256, 1024, 2048, 4096, 32767, 0xFFFF]: s = "B" * length s = s[:len(s)/2] + "\x00" + s[len(s)/2:] string.fuzz_library.append(s) # if the optional file '.fuzz_strings' is found, parse each line as a new entry for the fuzz library. try: fh = open(".fuzz_strings", "r") for fuzz_string in fh.readlines(): fuzz_string = fuzz_string.rstrip("\r\n") if fuzz_string != "": string.fuzz_library.append(fuzz_string) fh.close() except: pass # delete strings which length is greater than max_len. if max_len > 0: if any(len(s) > max_len for s in self.this_library): self.this_library = list(set([s[:max_len] for s in self.this_library])) if any(len(s) > max_len for s in self.fuzz_library): self.fuzz_library = list(set([s[:max_len] for s in self.fuzz_library])) def add_long_strings (self, sequence): ''' Given a sequence, generate a number of selectively chosen strings lengths of the given sequence and add to the string heuristic library. @type sequence: String @param sequence: Sequence to repeat for creation of fuzz strings. ''' for length in [128, 255, 256, 257, 511, 512, 513, 1023, 1024, 2048, 2049, 4095, 4096, 4097, 5000, 10000, 20000, 32762, 32763, 32764, 32765, 32766, 32767, 32768, 32769, 0xFFFF-2, 0xFFFF-1, 0xFFFF, 0xFFFF+1, 0xFFFF+2, 99999, 100000, 500000, 1000000]: long_string = sequence * length string.fuzz_library.append(long_string) def mutate (self): ''' Mutate the primitive by stepping through the fuzz library extended with the "this" library, return False on completion. @rtype: Boolean @return: True on success, False otherwise. ''' # loop through the fuzz library until a suitable match is found. while 1: # if we've ran out of mutations, raise the completion flag. if self.mutant_index == self.num_mutations(): self.fuzz_complete = True # if fuzzing was disabled or complete, and mutate() is called, ensure the original value is restored. if not self.fuzzable or self.fuzz_complete: self.value = self.original_value return False # update the current value from the fuzz library. self.value = (self.fuzz_library + self.this_library)[self.mutant_index] # increment the mutation count. self.mutant_index += 1 # if the size parameter is disabled, break out of the loop right now. if self.size == -1: break # ignore library items greather then user-supplied length. # TODO: might want to make this smarter. if len(self.value) > self.size: continue # pad undersized library items. 
if len(self.value) < self.size: self.value = self.value + self.padding * (self.size - len(self.value)) break return True def num_mutations (self): ''' Calculate and return the total number of mutations for this individual primitive. @rtype: Integer @return: Number of mutated forms this primitive can take ''' return len(self.fuzz_library) + len(self.this_library) def render (self): ''' Render the primitive, encode the string according to the specified encoding. ''' # try to encode the string properly and fall back to the default value on failure. try: self.rendered = str(self.value).encode(self.encoding) except: self.rendered = self.value return self.rendered ######################################################################################################################## class bit_field (base_primitive): def __init__ (self, value, width, max_num=None, endian="<", format="binary", signed=False, full_range=False, fuzzable=True, name=None): ''' The bit field primitive represents a number of variable length and is used to define all other integer types. @type value: Integer @param value: Default integer value @type width: Integer @param width: Width of bit fields @type endian: Character @param endian: (Optional, def=LITTLE_ENDIAN) Endianess of the bit field (LITTLE_ENDIAN: <, BIG_ENDIAN: >) @type format: String @param format: (Optional, def=binary) Output format, "binary" or "ascii" @type signed: Boolean @param signed: (Optional, def=False) Make size signed vs. unsigned (applicable only with format="ascii") @type full_range: Boolean @param full_range: (Optional, def=False) If enabled the field mutates through *all* possible values. @type fuzzable: Boolean @param fuzzable: (Optional, def=True) Enable/disable fuzzing of this primitive @type name: String @param name: (Optional, def=None) Specifying a name gives you direct access to a primitive ''' assert(type(width) is int or type(value) is long) if type(value) in [int, long, list, tuple]: self.value = self.original_value = value else: raise ValueError("The supplied value must be either an Int, Long, List or Tuple.") self.width = width self.max_num = max_num self.endian = endian self.format = format self.signed = signed self.full_range = full_range self.fuzzable = fuzzable self.name = name self.rendered = "" # rendered value self.fuzz_complete = False # flag if this primitive has been completely fuzzed self.fuzz_library = [] # library of fuzz heuristics self.mutant_index = 0 # current mutation number self.cyclic_index = 0 # when cycling through non-mutating values if self.max_num == None: self.max_num = self.to_decimal("1" + "0" * width) assert(type(self.max_num) is int or type(self.max_num) is long) # build the fuzz library. if self.full_range: # add all possible values. for i in xrange(0, self.max_num): self.fuzz_library.append(i) else: if type(value) in [list, tuple]: # Use the supplied values as the fuzz library. for val in value: self.fuzz_library.append(val) else: # try only "smart" values. self.add_integer_boundaries(0) self.add_integer_boundaries(self.max_num / 2) self.add_integer_boundaries(self.max_num / 3) self.add_integer_boundaries(self.max_num / 4) self.add_integer_boundaries(self.max_num / 8) self.add_integer_boundaries(self.max_num / 16) self.add_integer_boundaries(self.max_num / 32) self.add_integer_boundaries(self.max_num) # if the optional file '.fuzz_ints' is found, parse each line as a new entry for the fuzz library. 
try: fh = open(".fuzz_ints", "r") for fuzz_int in fh.readlines(): # convert the line into an integer, continue on failure. try: fuzz_int = long(fuzz_int, 16) except: continue if fuzz_int < self.max_num: self.fuzz_library.append(fuzz_int) fh.close() except: pass def add_integer_boundaries (self, integer): ''' Add the supplied integer and border cases to the integer fuzz heuristics library. @type integer: Int @param integer: Integer to append to fuzz heuristics ''' for i in xrange(-10, 10): case = integer + i # ensure the border case falls within the valid range for this field. if 0 <= case < self.max_num: if case not in self.fuzz_library: self.fuzz_library.append(case) def render (self): ''' Render the primitive. ''' # # binary formatting. # if self.format == "binary": bit_stream = "" rendered = "" # pad the bit stream to the next byte boundary. if self.width % 8 == 0: bit_stream += self.to_binary() else: bit_stream = "0" * (8 - (self.width % 8)) bit_stream += self.to_binary() # convert the bit stream from a string of bits into raw bytes. for i in xrange(len(bit_stream) / 8): chunk = bit_stream[8*i:8*i+8] rendered += struct.pack("B", self.to_decimal(chunk)) # if necessary, convert the endianess of the raw bytes. if self.endian == "<": rendered = list(rendered) rendered.reverse() rendered = "".join(rendered) self.rendered = rendered # # ascii formatting. # else: # if the sign flag is raised and we are dealing with a signed integer (first bit is 1). if self.signed and self.to_binary()[0] == "1": max_num = self.to_decimal("1" + "0" * (self.width - 1)) # mask off the sign bit. val = self.value & self.to_decimal("1" * (self.width - 1)) # account for the fact that the negative scale works backwards. val = max_num - val - 1 # toss in the negative sign. self.rendered = "%d" % ~val # unsigned integer or positive signed integer. else: self.rendered = "%d" % self.value return self.rendered def to_binary (self, number=None, bit_count=None): ''' Convert a number to a binary string. @type number: Integer @param number: (Optional, def=self.value) Number to convert @type bit_count: Integer @param bit_count: (Optional, def=self.width) Width of bit string @rtype: String @return: Bit string ''' if number == None: if type(self.value) in [list, tuple]: # We have been given a list to cycle through that is not being mutated... if self.cyclic_index == len(self.value): # Reset the index. self.cyclic_index = 0 number = self.value[self.cyclic_index] self.cyclic_index += 1 else: number = self.value if bit_count == None: bit_count = self.width return "".join(map(lambda x:str((number >> x) & 1), range(bit_count -1, -1, -1))) def to_decimal (self, binary): ''' Convert a binary string to a decimal number. 
@type binary: String @param binary: Binary string @rtype: Integer @return: Converted bit string ''' return int(binary, 2) ######################################################################################################################## class byte (bit_field): def __init__ (self, value, endian="<", format="binary", signed=False, full_range=False, fuzzable=True, name=None): self.s_type = "byte" if type(value) not in [int, long, list, tuple]: value = struct.unpack(endian + "B", value)[0] bit_field.__init__(self, value, 8, None, endian, format, signed, full_range, fuzzable, name) ######################################################################################################################## class word (bit_field): def __init__ (self, value, endian="<", format="binary", signed=False, full_range=False, fuzzable=True, name=None): self.s_type = "word" if type(value) not in [int, long, list, tuple]: value = struct.unpack(endian + "H", value)[0] bit_field.__init__(self, value, 16, None, endian, format, signed, full_range, fuzzable, name) ######################################################################################################################## class dword (bit_field): def __init__ (self, value, endian="<", format="binary", signed=False, full_range=False, fuzzable=True, name=None): self.s_type = "dword" if type(value) not in [int, long, list, tuple]: value = struct.unpack(endian + "L", value)[0] bit_field.__init__(self, value, 32, None, endian, format, signed, full_range, fuzzable, name) ######################################################################################################################## class qword (bit_field): def __init__ (self, value, endian="<", format="binary", signed=False, full_range=False, fuzzable=True, name=None): self.s_type = "qword" if type(value) not in [int, long, list, tuple]: value = struct.unpack(endian + "Q", value)[0] bit_field.__init__(self, value, 64, None, endian, format, signed, full_range, fuzzable, name)<|fim▁end|>
@rtype: 0 @return: 0 '''
<|file_name|>test_write_idmap.py<|end_file_name|><|fim▁begin|>############################################################################### # # Tests for XlsxWriter. #<|fim▁hole|> import unittest from ...compatibility import StringIO from ...vml import Vml class TestWriteOidmap(unittest.TestCase): """ Test the Vml _write_idmap() method. """ def setUp(self): self.fh = StringIO() self.vml = Vml() self.vml._set_filehandle(self.fh) def test_write_idmap(self): """Test the _write_idmap() method""" self.vml._write_idmap(1) exp = """<o:idmap v:ext="edit" data="1"/>""" got = self.fh.getvalue() self.assertEqual(got, exp) if __name__ == '__main__': unittest.main()<|fim▁end|>
# Copyright (c), 2013, John McNamara, jmcnamara@cpan.org #
<|file_name|>SILDebugInfoGenerator.cpp<|end_file_name|><|fim▁begin|>//===--- SILDebugInfoGenerator.cpp - Writes a SIL file for debugging ------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See http://swift.org/LICENSE.txt for license information // See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// #define DEBUG_TYPE "gsil-gen" #include "swift/AST/SILOptions.h" #include "swift/SIL/SILPrintContext.h" #include "swift/SIL/SILModule.h" #include "swift/SILOptimizer/PassManager/Transforms.h" #include "llvm/Support/FileSystem.h" #include "llvm/Support/MemoryBuffer.h" using namespace swift; namespace { /// A pass for generating debug info on SIL level. /// /// This pass is only enabled if SILOptions::SILOutputFileNameForDebugging is /// set (i.e. if the -gsil command line option is specified). /// The pass writes all SIL functions into one or multiple output files, /// depending on the size of the SIL. The names of the output files are derived /// from the main output file. /// /// output file name = <main-output-filename>.gsil_<n>.sil /// /// Where <n> is a consecutive number. The files are stored in the same /// same directory as the main output file. /// The debug locations and scopes of all functions and instructions are changed /// to point to the generated SIL output files. /// This enables debugging and profiling on SIL level. class SILDebugInfoGenerator : public SILModuleTransform { enum { /// To prevent extra large output files, e.g. when compiling the stdlib. LineLimitPerFile = 10000 }; /// A stream for counting line numbers. struct LineCountStream : public llvm::raw_ostream { llvm::raw_ostream &Underlying; int LineNum = 1; uint64_t Pos = 0; void write_impl(const char *Ptr, size_t Size) override { for (size_t Idx = 0; Idx < Size; Idx++) { char c = Ptr[Idx]; if (c == '\n') ++LineNum; } Underlying.write(Ptr, Size); Pos += Size; } uint64_t current_pos() const override { return Pos; } LineCountStream(llvm::raw_ostream &Underlying) : llvm::raw_ostream(/* unbuffered = */ true), Underlying(Underlying) { } ~LineCountStream() { flush(); } }; /// A print context which records the line numbers where instructions are /// printed. struct PrintContext : public SILPrintContext { LineCountStream LCS; llvm::DenseMap<const SILInstruction *, int> LineNums; void printInstructionCallBack(const SILInstruction *I) override { // Record the current line number of the instruction. 
LineNums[I] = LCS.LineNum; } PrintContext(llvm::raw_ostream &OS) : SILPrintContext(LCS), LCS(OS) { } virtual ~PrintContext() { } }; void run() override { SILModule *M = getModule(); StringRef FileBaseName = M->getOptions().SILOutputFileNameForDebugging; if (FileBaseName.empty()) return; DEBUG(llvm::dbgs() << "** SILDebugInfoGenerator **\n"); std::vector<SILFunction *> PrintedFuncs; int FileIdx = 0; auto FIter = M->begin(); while (FIter != M->end()) { std::string FileName; llvm::raw_string_ostream NameOS(FileName); NameOS << FileBaseName << ".gsil_" << FileIdx++ << ".sil"; NameOS.flush(); char *FileNameBuf = (char *)M->allocate(FileName.size() + 1, 1); strcpy(FileNameBuf, FileName.c_str()); DEBUG(llvm::dbgs() << "Write debug SIL file " << FileName << '\n'); std::error_code EC; llvm::raw_fd_ostream OutFile(FileName, EC, llvm::sys::fs::OpenFlags::F_None); assert(!OutFile.has_error() && !EC && "Can't write SIL debug file"); PrintContext Ctx(OutFile); // Write functions until we reach the LineLimitPerFile. do { SILFunction *F = &*FIter++; PrintedFuncs.push_back(F); // Set the debug scope for the function. SILLocation::DebugLoc DL(Ctx.LCS.LineNum, 1, FileNameBuf); RegularLocation Loc(DL); SILDebugScope *Scope = new (*M) SILDebugScope(Loc, F); F->setDebugScope(Scope); // Ensure that the function is visible for debugging. F->setBare(IsNotBare); // Print it to the output file. F->print(Ctx); } while (FIter != M->end() && Ctx.LCS.LineNum < LineLimitPerFile); // Set the debug locations of all instructions. for (SILFunction *F : PrintedFuncs) { const SILDebugScope *Scope = F->getDebugScope(); for (SILBasicBlock &BB : *F) { for (SILInstruction &I : BB) { SILLocation Loc = I.getLoc(); SILLocation::DebugLoc DL(Ctx.LineNums[&I], 1, FileNameBuf); assert(DL.Line && "no line set for instruction"); if (Loc.is<ReturnLocation>() || Loc.is<ImplicitReturnLocation>()) { Loc.setDebugInfoLoc(DL);<|fim▁hole|> I.setDebugLocation(SILDebugLocation(RLoc, Scope)); } } } } PrintedFuncs.clear(); } } StringRef getName() override { return "SILDebugInfoGenerator"; } }; } // end anonymous namespace SILTransform *swift::createSILDebugInfoGenerator() { return new SILDebugInfoGenerator(); }<|fim▁end|>
I.setDebugLocation(SILDebugLocation(Loc, Scope)); } else { RegularLocation RLoc(DL);
<|file_name|>SecurityRole.java<|end_file_name|><|fim▁begin|>package mat.model; import com.google.gwt.user.client.rpc.IsSerializable; /** * The Class SecurityRole. */ public class SecurityRole implements IsSerializable { /** The Constant serialVersionUID. */ private static final long serialVersionUID = 1L; /** The Constant ADMIN_ROLE. */ public static final String ADMIN_ROLE = "Administrator"; /** The Constant USER_ROLE. */ public static final String USER_ROLE = "User"; /** The Constant SUPER_USER_ROLE. */ public static final String SUPER_USER_ROLE = "Super user"; /** The Constant ADMIN_ROLE_ID. */ public static final String ADMIN_ROLE_ID = "1"; /** The Constant USER_ROLE_ID. */ public static final String USER_ROLE_ID = "2"; /** The Constant SUPER_USER_ROLE_ID. */ public static final String SUPER_USER_ROLE_ID = "3"; /** The id. */ private String id; /** The description. */ private String description; /** * Gets the id. * * @return the id */ public String getId() {<|fim▁hole|> * Sets the id. * * @param id * the new id */ public void setId(String id) { this.id = id; } /** * Gets the description. * * @return the description */ public String getDescription() { return description; } /** * Sets the description. * * @param description * the new description */ public void setDescription(String description) { this.description = description; } }<|fim▁end|>
return id; } /**
<|file_name|>A_evolve_outer_star_to_giant.py<|end_file_name|><|fim▁begin|>import os import os.path from amuse.units import units from amuse.datamodel import Particle from amuse.ext.star_to_sph import pickle_stellar_model from amuse.community.mesa.interface import MESA as stellar_evolution_code from xiTau_parameters import triple_parameters def evolve_giant(giant, stop_radius): stellar_evolution = stellar_evolution_code() giant_in_code = stellar_evolution.particles.add_particle(giant) while (giant_in_code.radius < 0.7 | units.AU): giant_in_code.evolve_one_step() print "Giant starts to ascend the giant branch, now saving model every step..." print giant_in_code.as_set() i = 0 while (giant_in_code.radius < stop_radius):<|fim▁hole|> giant_in_code.evolve_one_step() print giant_in_code.radius, giant_in_code.age pickle_file_name = "./model_{0:=04}_".format(i) + "%0.1f"%(giant_in_code.radius.value_in(units.AU)) pickle_stellar_model(giant_in_code, pickle_file_name) i += 1 if __name__ == "__main__": model_directory = os.path.join("../../../../../BIGDATA/code/amuse-10.0", "giant_models") if not os.path.exists(model_directory): os.mkdir(model_directory) os.chdir(model_directory) giant = Particle(mass = triple_parameters["mass_out"]) print "\nEvolving with", stellar_evolution_code.__name__ evolve_giant(giant, 1.0 | units.AU) print "Done"<|fim▁end|>
<|file_name|>deconv_simult.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import sys, time, scipy.optimize, scipy.ndimage.interpolation import scipy.ndimage.filters as filt import src.lib.utils as fn import src.lib.wsutils as ws import src.lib.Algorithms as alg import scipy.signal.signaltools as sig from src.lib.PSF import * from src.lib.AImage import * from src.lib.wavelet import * from src.lib.deconv import * from numpy import * from copy import deepcopy out = fn.Verbose() CUDA = False def deconv(data, params, savedir='results/'): global err, old_bkg, TRACE, TRACE2, err_bk, err_sm err = old_bkg = None sfact, gres, itnb, mpar = params['S_FACT'], params['G_RES'], params['MAX_IT_D'], params['MOF_PARAMS'] gstrat, gpar, gpos = params['G_STRAT'], params['G_PARAMS'], params['G_POS'] show, maxpos_range, stddev = params['SHOW'], params['MAXPOS_RANGE'], params['SIGMA_SKY'] max_iratio_range, force_ini = params['MAX_IRATIO_RANGE'], params['FORCE_INI'] bkg_ini, stepfact, bkg_ratio = params['BKG_INI_CST'], params['BKG_STEP_RATIO'], params['BKG_START_RATIO'] _lambda, nbruns, nb_src = params['LAMBDA'], params['D_NB_RUNS'], params['NB_SRC'] box_size, src_range, cuda = params['BOX_SIZE'], params['SRC_RANGE'], params['CUDA'] srcini, minstep_px, maxstep_px = params['INI_PAR'], params['MIN_STEP_D'], params['MAX_STEP_D'] thresh = params['WL_THRESHOLD_DEC'] out(2, 'Initialization') nimg = params['filenb'] objs = [data['objs'][i][0].astype('float64') for i in xrange(nimg)] sigmas = [data['objssigs'][i][0].astype('float64') for i in xrange(nimg)] masks = [data['objsmasks'][i][0].astype('float64') for i in xrange(nimg)] psfs = [data['psfs'][i][0].astype('float64') for i in xrange(nimg)] dev = stddev[0] mpar = mpar[0] gpar = gpar[0] gpos = gpos[0] bshape = objs[0].shape sshape = (int(bshape[0]*sfact), int(bshape[1]*sfact)) sources = [PSF(sshape, (sshape[0]/2., sshape[1]/2.)) for i in xrange(nimg)] ############### Prepare the gaussian PSF ############### r_len = sshape[0] c1, c2 = r_len/2.-0.5, r_len/2.-0.5 #-0.5 to align on the pixels grid r = fn.gaussian(sshape, gres, c1, c2, 1.) # r = fn.LG_filter(sshape, gres, c1, c2) if cuda and fn.has_cuda(): out(2, 'CUDA initializations') context, plan = fn.cuda_init(sshape) r = fn.switch_psf_shape(r, 'SW') def conv(a, b): return fn.cuda_conv(plan, a, b) def div(a, b): return fn.cuda_fftdiv(plan, a, b) psfs = [fn.switch_psf_shape(p, 'SW') for p in psfs] else: conv = fn.conv div = None#fn.div cuda = False r /= r.sum() div = None ########## Initializations ########## img_shifts = fn.get_shifts(objs, 10.) 
_lambda = fn.get_lambda(sshape, None, _lambda) # if not thresh: # thresh = params['SIGMA_SKY'] dec = DecSrc(objs, sigmas, masks, psfs, r, conv, img_shifts, _lambda, gres, thresh, nb_src, srcini, box_size, src_range, force_ini, bkg_ini, bkg_ratio) ########## Deconvolution ########## bak, src_par = dec.deconv(itnb, minstep_px, maxstep_px, maxpos_range, max_iratio_range, stepfact, nbruns) out(2, 'Initial sources parameters [x,y,I]:', dec.src_ini) out(2, 'Final sources parameters [x,y,I]:', src_par) out(2, 'offsets:', dec.shifts) ############ Prepare output ############ imgs, names = [], [] imgs += [bak, dec.ini] names += ['background', 'bkg_ini'] dec.set_sources(src_par, bak) for i in xrange(len(objs)): bak_conv = conv(dec.psfs[i], bak+dec.sources[i].array) resi = dec.get_im_resi(bak_conv, i) imgs += [objs[i], resi, dec.sources[i].array, bak+dec.sources[i].array] names += ["g_"+str(i+1), "resi_%(fnb)02d" % {'fnb':i+1}, "sources_%(fnb)02d" % {'fnb':i+1}, "deconv_%(fnb)02d" % {'fnb':i+1}] ############ Save and display ############ if savedir is not None: out(2, 'Writing to disk...') for i, im in enumerate(imgs): fn.array2fits(im, savedir+names[i]+'.fits') # fn.save_img(imgs, names, savedir+'overview.png', min_size=(256,256)) if show == True: out(2, 'Displaying results...') for i, im in enumerate(imgs): fn.array2ds9(im, name=names[i], frame=i+1) import pylab as p p.figure(1) trace = array(dec.trace) X = arange(trace.shape[0]) p.title('Error evolution') p.plot(X, trace) # p.legend() p.draw() p.savefig(savedir+'trace_deconv.png') if show == True: p.show() if cuda: out(2, 'Freeing CUDA context...') context.pop() <|fim▁hole|> cfg = 'config.py' if argv is not None: sys.argv = argv opt, args = fn.get_args(sys.argv) MAX_IT_D = MAXPOS_STEP = MAX_IRATIO_STEP = SHOW = FORCE_INI = None if args is not None: cfg = args[0] if 's' in opt: out.level = 0 if 'v' in opt: out.level = 2 if 'd' in opt: DEBUG = True out.level = 3 out(1, '~~~ DEBUG MODE ~~~') if 'b' in opt: import prepare prepare.main(['deconv.py', '-b', cfg]) if 'e' in opt: import prepare prepare.main(['deconv.py', '-ce', cfg]) if 'i' in opt: FORCE_INI = True if 'h' in opt: out(1, 'No help page yet!') return 0 out(1, 'Begin deconvolution process') #TODO: check workspace f = open(cfg, 'r') exec f.read() f.close() vars = ['FILENAME', 'MAX_IT_D', 'S_FACT', 'G_RES', 'SIGMA_SKY', 'MOF_PARAMS', 'G_STRAT', 'G_PARAMS', 'G_POS', 'CENTER'] err = fn.check_namespace(vars, locals()) if err > 0: return 1 out(2, FILENAME) #@UndefinedVariable out(2, 'Restore data from extracted files') fnb = ws.get_filenb(FILENAME) #@UndefinedVariable files = ws.getfilenames(fnb) data = ws.restore(*files) data['filenb'] = fnb dec = deconv(data, locals()) # if NOWRITE is False: # fn.write_cfg(cfg, {'DEC_PAR':dec}) out(1, 'Deconvolution done') return 0 if __name__ == "__main__": # import cProfile, pstats # prof = cProfile.Profile() # prof = prof.runctx("main()", globals(), locals()) # stats = pstats.Stats(prof) # stats.sort_stats("time") # Or cumulative # stats.print_stats(15) # how many to print sys.exit(main())<|fim▁end|>
return src_par, dec.shifts, dec.src_ini def main(argv=None):
<|file_name|>weaponupgradescpuneedbonuspostpercentcpulocationshipmodulesrequiringmissilelauncheroperation.py<|end_file_name|><|fim▁begin|># weaponUpgradesCpuNeedBonusPostPercentCpuLocationShipModulesRequiringMissileLauncherOperation # # Used by: # Implants named like: Zainou 'Gnome' Launcher CPU Efficiency LE (6 of 6) # Skill: Weapon Upgrades<|fim▁hole|>def handler(fit, container, context): level = container.level if "skill" in context else 1 fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Missile Launcher Operation"), "cpu", container.getModifiedItemAttr("cpuNeedBonus") * level)<|fim▁end|>
type = "passive"
<|file_name|>FindInFiles-test.js<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2014 - present Adobe Systems Incorporated. All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. * */ /*jslint regexp: true */ /*global describe, it, expect, beforeFirst, afterLast, beforeEach, afterEach, waits, waitsFor, waitsForDone, runs, spyOn */ define(function (require, exports, module) { "use strict"; var Commands = require("command/Commands"), KeyEvent = require("utils/KeyEvent"), SpecRunnerUtils = require("spec/SpecRunnerUtils"), FileSystemError = require("filesystem/FileSystemError"), FileUtils = require("file/FileUtils"), FindUtils = require("search/FindUtils"), Async = require("utils/Async"), LanguageManager = require("language/LanguageManager"), StringUtils = require("utils/StringUtils"), Strings = require("strings"), _ = require("thirdparty/lodash"); var PreferencesManager; var promisify = Async.promisify; // for convenience describe("FindInFiles", function () { this.category = "integration"; var defaultSourcePath = SpecRunnerUtils.getTestPath("/spec/FindReplace-test-files"), testPath, nextFolderIndex = 1, searchResults, CommandManager, DocumentManager, MainViewManager, EditorManager, FileFilters, FileSystem, File, FindInFiles, FindInFilesUI, ProjectManager, testWindow, $; beforeFirst(function () { SpecRunnerUtils.createTempDirectory(); // Create a new window that will be shared by ALL tests in this spec. 
SpecRunnerUtils.createTestWindowAndRun(this, function (w) { testWindow = w; // Load module instances from brackets.test CommandManager = testWindow.brackets.test.CommandManager; DocumentManager = testWindow.brackets.test.DocumentManager; EditorManager = testWindow.brackets.test.EditorManager; FileFilters = testWindow.brackets.test.FileFilters; FileSystem = testWindow.brackets.test.FileSystem; File = testWindow.brackets.test.File; FindInFiles = testWindow.brackets.test.FindInFiles; FindInFilesUI = testWindow.brackets.test.FindInFilesUI; ProjectManager = testWindow.brackets.test.ProjectManager; MainViewManager = testWindow.brackets.test.MainViewManager; $ = testWindow.$; PreferencesManager = testWindow.brackets.test.PreferencesManager; PreferencesManager.set("findInFiles.nodeSearch", false); PreferencesManager.set("findInFiles.instantSearch", false); }); }); afterLast(function () { CommandManager = null; DocumentManager = null; EditorManager = null; FileSystem = null; File = null; FindInFiles = null; FindInFilesUI = null; ProjectManager = null; MainViewManager = null; $ = null; testWindow = null; PreferencesManager = null; SpecRunnerUtils.closeTestWindow(); SpecRunnerUtils.removeTempDirectory(); }); function openProject(sourcePath) { testPath = sourcePath; SpecRunnerUtils.loadProjectInTestWindow(testPath); } // Note: these utilities can be called without wrapping in a runs() block, because all their top-level // statements are calls to runs() or waitsFor() (or other functions that make the same guarantee). But after // calling one of these, calls to other Jasmine APIs (e.g. such as expects()) *must* be wrapped in runs(). function waitForSearchBarClose() { // Make sure search bar from previous test has animated out fully waitsFor(function () { return $(".modal-bar").length === 0; }, "search bar close"); } function openSearchBar(scope, showReplace) { runs(function () { FindInFiles._searchDone = false; FindInFilesUI._showFindBar(scope, showReplace); }); waitsFor(function () { return $(".modal-bar").length === 1; }, "search bar open"); runs(function () { // Reset the regexp and case-sensitivity toggles. 
["#find-regexp", "#find-case-sensitive"].forEach(function (button) { if ($(button).is(".active")) { $(button).click(); expect($(button).is(".active")).toBe(false); } }); }); } function closeSearchBar() { runs(function () { FindInFilesUI._closeFindBar(); }); waitForSearchBarClose(); } function executeSearch(searchString) { runs(function () { var $searchField = $("#find-what"); $searchField.val(searchString).trigger("input"); SpecRunnerUtils.simulateKeyEvent(KeyEvent.DOM_VK_RETURN, "keydown", $searchField[0]); }); waitsFor(function () { return FindInFiles._searchDone; }, "Find in Files done"); } function numMatches(results) { return _.reduce(_.pluck(results, "matches"), function (sum, matches) { return sum + matches.length; }, 0); } function doSearch(options) { runs(function () { FindInFiles.doSearchInScope(options.queryInfo, null, null, options.replaceText).done(function (results) { searchResults = results; }); }); waitsFor(function () { return searchResults; }, 1000, "search completed"); runs(function () { expect(numMatches(searchResults)).toBe(options.numMatches); }); } // The functions below are *not* safe to call without wrapping in runs(), if there were any async steps previously // (including calls to any of the utilities above) function doReplace(options) { return FindInFiles.doReplace(searchResults, options.replaceText, { forceFilesOpen: options.forceFilesOpen, isRegexp: options.queryInfo.isRegexp }); } /** * Helper function that calls the given asynchronous processor once on each file in the given subtree * and returns a promise that's resolved when all files are processed. * @param {string} rootPath The root of the subtree to search. * @param {function(string, string): $.Promise} processor The function that processes each file. Args are: * contents: the contents of the file * fullPath: the full path to the file on disk * @return {$.Promise} A promise that is resolved when all files are processed, or rejected if there was * an error reading one of the files or one of the process steps was rejected. */ function visitAndProcessFiles(rootPath, processor) { var rootEntry = FileSystem.getDirectoryForPath(rootPath), files = []; function visitor(file) { if (!file.isDirectory) { // Skip binary files, since we don't care about them for these purposes and we can't read them // to get their contents. if (!LanguageManager.getLanguageForPath(file.fullPath).isBinary()) { files.push(file); } } return true; } return promisify(rootEntry, "visit", visitor).then(function () { return Async.doInParallel(files, function (file) { return promisify(file, "read").then(function (contents) { return processor(contents, file.fullPath); }); }); }); } function ensureParentExists(file) { var parentDir = FileSystem.getDirectoryForPath(file.parentPath); return promisify(parentDir, "exists").then(function (exists) { if (!exists) { return promisify(parentDir, "create"); } return null; }); } function copyWithLineEndings(src, dest, lineEndings) { function copyOneFileWithLineEndings(contents, srcPath) { var destPath = dest + srcPath.slice(src.length), destFile = FileSystem.getFileForPath(destPath), newContents = FileUtils.translateLineEndings(contents, lineEndings); return ensureParentExists(destFile).then(function () { return promisify(destFile, "write", newContents); }); } return promisify(FileSystem.getDirectoryForPath(dest), "create").then(function () { return visitAndProcessFiles(src, copyOneFileWithLineEndings); }); } // Creates a clean copy of the test project before each test. 
We don't delete the old // folders as we go along (to avoid problems with deleting the project out from under the // open test window); we just delete the whole temp folder at the end. function openTestProjectCopy(sourcePath, lineEndings) { testPath = SpecRunnerUtils.getTempDirectory() + "/find-in-files-test-" + (nextFolderIndex++); runs(function () { if (lineEndings) { waitsForDone(copyWithLineEndings(sourcePath, testPath, lineEndings), "copy test files with line endings"); } else { // Note that we don't skip image files in this case, but it doesn't matter since we'll // only compare files that have an associated file in the known goods folder. waitsForDone(SpecRunnerUtils.copy(sourcePath, testPath), "copy test files"); } }); SpecRunnerUtils.loadProjectInTestWindow(testPath); } beforeEach(function () { searchResults = null; }); describe("Find", function () { beforeEach(function () { openProject(defaultSourcePath); }); afterEach(closeSearchBar); it("should find all occurences in project", function () { openSearchBar(); executeSearch("foo"); runs(function () { var fileResults = FindInFiles.searchModel.results[testPath + "/bar.txt"]; expect(fileResults).toBeFalsy(); fileResults = FindInFiles.searchModel.results[testPath + "/foo.html"]; expect(fileResults).toBeTruthy(); expect(fileResults.matches.length).toBe(7); fileResults = FindInFiles.searchModel.results[testPath + "/foo.js"]; expect(fileResults).toBeTruthy(); expect(fileResults.matches.length).toBe(4); fileResults = FindInFiles.searchModel.results[testPath + "/css/foo.css"]; expect(fileResults).toBeTruthy(); expect(fileResults.matches.length).toBe(3); }); }); it("should ignore known binary file types", function () { var $dlg, actualMessage, expectedMessage, exists = false, done = false, imageDirPath = testPath + "/images"; runs(function () { // Set project to have only images SpecRunnerUtils.loadProjectInTestWindow(imageDirPath); // Verify an image exists in folder var file = FileSystem.getFileForPath(testPath + "/images/icon_twitter.png"); file.exists(function (fileError, fileExists) { exists = fileExists; done = true; }); }); waitsFor(function () { return done; }, "file.exists"); runs(function () { expect(exists).toBe(true); openSearchBar(); }); runs(function () { // Launch filter editor FileFilters.editFilter({ name: "", patterns: [] }, -1); // Dialog should state there are 0 files in project $dlg = $(".modal"); expectedMessage = StringUtils.format(Strings.FILTER_FILE_COUNT_ALL, 0, Strings.FIND_IN_FILES_NO_SCOPE); }); // Message loads asynchronously, but dialog should eventually state: "Allows all 0 files in project" waitsFor(function () { actualMessage = $dlg.find(".exclusions-filecount").text(); return (actualMessage === expectedMessage); }, "display file count"); runs(function () { // Dismiss filter dialog (OK button is disabled, have to click on Cancel) $dlg.find(".dialog-button[data-button-id='cancel']").click(); // Close search bar var $searchField = $(".modal-bar #find-group input"); SpecRunnerUtils.simulateKeyEvent(KeyEvent.DOM_VK_ESCAPE, "keydown", $searchField[0]); }); runs(function () { // Set project back to main test folder SpecRunnerUtils.loadProjectInTestWindow(testPath); }); }); it("should ignore unreadable files", function () { // Add a nonexistent file to the ProjectManager.getAllFiles() result, which will force a file IO error // when we try to read the file later. Similar errors may arise in real-world for non-UTF files, etc. 
SpecRunnerUtils.injectIntoGetAllFiles(testWindow, testPath + "/doesNotExist.txt"); openSearchBar(); executeSearch("foo"); runs(function () { expect(Object.keys(FindInFiles.searchModel.results).length).toBe(3); }); }); it("should find all occurences in folder", function () { var dirEntry = FileSystem.getDirectoryForPath(testPath + "/css/"); openSearchBar(dirEntry); executeSearch("foo"); runs(function () { var fileResults = FindInFiles.searchModel.results[testPath + "/bar.txt"]; expect(fileResults).toBeFalsy(); fileResults = FindInFiles.searchModel.results[testPath + "/foo.html"]; expect(fileResults).toBeFalsy(); fileResults = FindInFiles.searchModel.results[testPath + "/foo.js"]; expect(fileResults).toBeFalsy(); fileResults = FindInFiles.searchModel.results[testPath + "/css/foo.css"]; expect(fileResults).toBeTruthy(); expect(fileResults.matches.length).toBe(3); }); }); it("should find all occurences in single file", function () { var fileEntry = FileSystem.getFileForPath(testPath + "/foo.js"); openSearchBar(fileEntry); executeSearch("foo"); runs(function () { var fileResults = FindInFiles.searchModel.results[testPath + "/bar.txt"]; expect(fileResults).toBeFalsy(); fileResults = FindInFiles.searchModel.results[testPath + "/foo.html"]; expect(fileResults).toBeFalsy(); fileResults = FindInFiles.searchModel.results[testPath + "/foo.js"]; expect(fileResults).toBeTruthy(); expect(fileResults.matches.length).toBe(4); fileResults = FindInFiles.searchModel.results[testPath + "/css/foo.css"]; expect(fileResults).toBeFalsy(); }); }); it("should find start and end positions", function () { var filePath = testPath + "/foo.js", fileEntry = FileSystem.getFileForPath(filePath); openSearchBar(fileEntry); executeSearch("callFoo"); runs(function () { var fileResults = FindInFiles.searchModel.results[filePath]; expect(fileResults).toBeTruthy(); expect(fileResults.matches.length).toBe(1); var match = fileResults.matches[0]; expect(match.start.ch).toBe(13); expect(match.start.line).toBe(6); expect(match.end.ch).toBe(20); expect(match.end.line).toBe(6); }); }); it("should keep dialog and show panel when there are results", function () { var filePath = testPath + "/foo.js", fileEntry = FileSystem.getFileForPath(filePath); openSearchBar(fileEntry); executeSearch("callFoo"); // With instant search, the Search Bar should not close on a search runs(function () { var fileResults = FindInFiles.searchModel.results[filePath]; expect(fileResults).toBeTruthy(); expect($("#find-in-files-results").is(":visible")).toBeTruthy(); expect($(".modal-bar").length).toBe(1); }); }); it("should keep dialog and not show panel when there are no results", function () { var filePath = testPath + "/bar.txt", fileEntry = FileSystem.getFileForPath(filePath); openSearchBar(fileEntry); executeSearch("abcdefghi"); waitsFor(function () { return (FindInFiles._searchDone); }, "search complete"); runs(function () { var result, resultFound = false; // verify searchModel.results Object is empty for (result in FindInFiles.searchModel.results) { if (FindInFiles.searchModel.results.hasOwnProperty(result)) { resultFound = true; } } expect(resultFound).toBe(false); expect($("#find-in-files-results").is(":visible")).toBeFalsy(); expect($(".modal-bar").length).toBe(1); // Close search bar var $searchField = $(".modal-bar #find-group input"); SpecRunnerUtils.simulateKeyEvent(KeyEvent.DOM_VK_ESCAPE, "keydown", $searchField[0]); }); }); it("should open file in editor and select text when a result is clicked", function () { var filePath = testPath + "/foo.html", 
fileEntry = FileSystem.getFileForPath(filePath); openSearchBar(fileEntry); executeSearch("foo"); runs(function () { // Verify no current document var editor = EditorManager.getActiveEditor(); expect(editor).toBeFalsy(); // Get panel var $searchResults = $("#find-in-files-results"); expect($searchResults.is(":visible")).toBeTruthy(); // Get list in panel var $panelResults = $searchResults.find("table.bottom-panel-table tr"); expect($panelResults.length).toBe(8); // 7 hits + 1 file section // First item in list is file section expect($($panelResults[0]).hasClass("file-section")).toBeTruthy(); // Click second item which is first hit var $firstHit = $($panelResults[1]); expect($firstHit.hasClass("file-section")).toBeFalsy(); $firstHit.click(); setTimeout(function () { // Verify current document editor = EditorManager.getActiveEditor(); expect(editor.document.file.fullPath).toEqual(filePath); // Verify selection expect(editor.getSelectedText().toLowerCase() === "foo"); waitsForDone(CommandManager.execute(Commands.FILE_CLOSE_ALL), "closing all files"); }, 500); }); }); it("should open file in working set when a result is double-clicked", function () { var filePath = testPath + "/foo.js", fileEntry = FileSystem.getFileForPath(filePath); openSearchBar(fileEntry); executeSearch("foo"); runs(function () { // Verify document is not yet in working set expect(MainViewManager.findInWorkingSet(MainViewManager.ALL_PANES, filePath)).toBe(-1); // Get list in panel var $panelResults = $("#find-in-files-results table.bottom-panel-table tr"); expect($panelResults.length).toBe(5); // 4 hits + 1 file section // Double-click second item which is first hit var $firstHit = $($panelResults[1]); expect($firstHit.hasClass("file-section")).toBeFalsy(); $firstHit.dblclick(); // Verify document is now in working set expect(MainViewManager.findInWorkingSet(MainViewManager.ALL_PANES, filePath)).not.toBe(-1); waitsForDone(CommandManager.execute(Commands.FILE_CLOSE_ALL), "closing all files"); }); }); it("should update results when a result in a file is edited", function () { var filePath = testPath + "/foo.html", fileEntry = FileSystem.getFileForPath(filePath), panelListLen = 8, // 7 hits + 1 file section $panelResults; openSearchBar(fileEntry); executeSearch("foo"); runs(function () { // Verify document is not yet in working set expect(MainViewManager.findInWorkingSet(MainViewManager.ALL_PANES, filePath)).toBe(-1); // Get list in panel $panelResults = $("#find-in-files-results table.bottom-panel-table tr"); expect($panelResults.length).toBe(panelListLen); // Click second item which is first hit var $firstHit = $($panelResults[1]); expect($firstHit.hasClass("file-section")).toBeFalsy(); $firstHit.click(); }); // Wait for file to open if not already open waitsFor(function () { var editor = EditorManager.getActiveEditor(); return (editor.document.file.fullPath === filePath); }, 1000, "file open"); // Wait for selection to change (this happens asynchronously after file opens) waitsFor(function () { var editor = EditorManager.getActiveEditor(), sel = editor.getSelection(); return (sel.start.line === 4 && sel.start.ch === 7); }, 1000, "selection change"); runs(function () { // Verify current selection var editor = EditorManager.getActiveEditor(); expect(editor.getSelectedText().toLowerCase()).toBe("foo"); // Edit text to remove hit from file var sel = editor.getSelection(); editor.document.replaceRange("Bar", sel.start, sel.end); }); // Panel is updated asynchronously waitsFor(function () { $panelResults = 
$("#find-in-files-results table.bottom-panel-table tr"); return ($panelResults.length < panelListLen); }, "Results panel updated"); runs(function () { // Verify list automatically updated expect($panelResults.length).toBe(panelListLen - 1); waitsForDone(CommandManager.execute(Commands.FILE_CLOSE, { _forceClose: true }), "closing file"); }); }); it("should not clear the model until next search is actually committed", function () { var filePath = testPath + "/foo.js", fileEntry = FileSystem.getFileForPath(filePath); openSearchBar(fileEntry); executeSearch("foo"); runs(function () { expect(Object.keys(FindInFiles.searchModel.results).length).not.toBe(0); }); closeSearchBar(); openSearchBar(fileEntry); runs(function () { // Search model shouldn't be cleared from merely reopening search bar expect(Object.keys(FindInFiles.searchModel.results).length).not.toBe(0); }); closeSearchBar(); runs(function () { // Search model shouldn't be cleared after search bar closed without running a search expect(Object.keys(FindInFiles.searchModel.results).length).not.toBe(0); }); }); }); describe("Find results paging", function () { var expectedPages = [ { totalResults: 500, totalFiles: 2, overallFirstIndex: 1, overallLastIndex: 100, matchRanges: [{file: 0, filename: "manyhits-1.txt", first: 0, firstLine: 1, last: 99, lastLine: 100, pattern: /i'm going to\s+find this\s+now/}], firstPageEnabled: false, lastPageEnabled: true, prevPageEnabled: false, nextPageEnabled: true }, { totalResults: 500, totalFiles: 2, overallFirstIndex: 101, overallLastIndex: 200, matchRanges: [{file: 0, filename: "manyhits-1.txt", first: 0, firstLine: 101, last: 99, lastLine: 200, pattern: /i'm going to\s+find this\s+now/}], firstPageEnabled: true, lastPageEnabled: true, prevPageEnabled: true, nextPageEnabled: true }, { totalResults: 500, totalFiles: 2, overallFirstIndex: 201, overallLastIndex: 300, matchRanges: [ {file: 0, filename: "manyhits-1.txt", first: 0, firstLine: 201, last: 49, lastLine: 250, pattern: /i'm going to\s+find this\s+now/}, {file: 1, filename: "manyhits-2.txt", first: 0, firstLine: 1, last: 49, lastLine: 50, pattern: /you're going to\s+find this\s+now/} ], firstPageEnabled: true, lastPageEnabled: true, prevPageEnabled: true, nextPageEnabled: true }, { totalResults: 500, totalFiles: 2, overallFirstIndex: 301, overallLastIndex: 400, matchRanges: [{file: 0, filename: "manyhits-2.txt", first: 0, firstLine: 51, last: 99, lastLine: 150, pattern: /you're going to\s+find this\s+now/}], firstPageEnabled: true, lastPageEnabled: true, prevPageEnabled: true, nextPageEnabled: true }, { totalResults: 500, totalFiles: 2, overallFirstIndex: 401, overallLastIndex: 500, matchRanges: [{file: 0, filename: "manyhits-2.txt", first: 0, firstLine: 151, last: 99, lastLine: 250, pattern: /you're going to\s+find this\s+now/}], firstPageEnabled: true, lastPageEnabled: false, prevPageEnabled: true, nextPageEnabled: false } ]; function expectPageDisplay(options) { // Check the title expect($("#find-in-files-results .title").text().match("\\b" + options.totalResults + "\\b")).toBeTruthy(); expect($("#find-in-files-results .title").text().match("\\b" + options.totalFiles + "\\b")).toBeTruthy(); var paginationInfo = $("#find-in-files-results .pagination-col").text(); expect(paginationInfo.match("\\b" + options.overallFirstIndex + "\\b")).toBeTruthy(); expect(paginationInfo.match("\\b" + options.overallLastIndex + "\\b")).toBeTruthy(); // Check for presence of file and first/last item rows within each file options.matchRanges.forEach(function (range) 
{ var $fileRow = $("#find-in-files-results tr.file-section[data-file-index='" + range.file + "']"); expect($fileRow.length).toBe(1); expect($fileRow.find(".dialog-filename").text()).toEqual(range.filename); var $firstMatchRow = $("#find-in-files-results tr[data-file-index='" + range.file + "'][data-item-index='" + range.first + "']"); expect($firstMatchRow.length).toBe(1); expect($firstMatchRow.find(".line-number").text().match("\\b" + range.firstLine + "\\b")).toBeTruthy(); expect($firstMatchRow.find(".line-text").text().match(range.pattern)).toBeTruthy(); var $lastMatchRow = $("#find-in-files-results tr[data-file-index='" + range.file + "'][data-item-index='" + range.last + "']"); expect($lastMatchRow.length).toBe(1); expect($lastMatchRow.find(".line-number").text().match("\\b" + range.lastLine + "\\b")).toBeTruthy(); expect($lastMatchRow.find(".line-text").text().match(range.pattern)).toBeTruthy(); }); // Check enablement of buttons expect($("#find-in-files-results .first-page").hasClass("disabled")).toBe(!options.firstPageEnabled); expect($("#find-in-files-results .last-page").hasClass("disabled")).toBe(!options.lastPageEnabled); expect($("#find-in-files-results .prev-page").hasClass("disabled")).toBe(!options.prevPageEnabled); expect($("#find-in-files-results .next-page").hasClass("disabled")).toBe(!options.nextPageEnabled); } it("should page forward, then jump back to first page, displaying correct contents at each step", function () { openProject(SpecRunnerUtils.getTestPath("/spec/FindReplace-test-files-manyhits")); openSearchBar(); // This search will find 500 hits in 2 files. Since there are 100 hits per page, there should // be five pages, and the third page should have 50 results from the first file and 50 results // from the second file. 
executeSearch("find this"); runs(function () { var i; for (i = 0; i < 5; i++) { if (i > 0) { $("#find-in-files-results .next-page").click(); } expectPageDisplay(expectedPages[i]); } $("#find-in-files-results .first-page").click(); expectPageDisplay(expectedPages[0]); }); }); it("should jump to last page, then page backward, displaying correct contents at each step", function () { openProject(SpecRunnerUtils.getTestPath("/spec/FindReplace-test-files-manyhits")); executeSearch("find this"); runs(function () { var i; $("#find-in-files-results .last-page").click(); for (i = 4; i >= 0; i--) { if (i < 4) { $("#find-in-files-results .prev-page").click(); } expectPageDisplay(expectedPages[i]); } }); }); }); describe("SearchModel update on change events", function () { var oldResults, gotChange, wasQuickChange; function fullTestPath(path) { return testPath + "/" + path; } function expectUnchangedExcept(paths) { Object.keys(FindInFiles.searchModel.results).forEach(function (path) { if (paths.indexOf(path) === -1) { expect(FindInFiles.searchModel.results[path]).toEqual(oldResults[path]); } }); } beforeEach(function () { gotChange = false; oldResults = null; wasQuickChange = false; FindInFiles.searchModel.on("change.FindInFilesTest", function (event, quickChange) { gotChange = true; wasQuickChange = quickChange; }); openTestProjectCopy(defaultSourcePath); doSearch({ queryInfo: {query: "foo"}, numMatches: 14 }); runs(function () {<|fim▁hole|> oldResults = _.cloneDeep(FindInFiles.searchModel.results); }); }); afterEach(function () { FindInFiles.searchModel.off(".FindInFilesTest"); waitsForDone(CommandManager.execute(Commands.FILE_CLOSE_ALL, { _forceClose: true }), "close all files"); }); describe("when filename changes", function () { it("should handle a filename change", function () { runs(function () { FindInFiles._fileNameChangeHandler(null, fullTestPath("foo.html"), fullTestPath("newfoo.html")); }); waitsFor(function () { return gotChange; }, "model change event"); runs(function () { expectUnchangedExcept([fullTestPath("foo.html"), fullTestPath("newfoo.html")]); expect(FindInFiles.searchModel.results[fullTestPath("foo.html")]).toBeUndefined(); expect(FindInFiles.searchModel.results[fullTestPath("newfoo.html")]).toEqual(oldResults[fullTestPath("foo.html")]); expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 3, matches: 14}); expect(wasQuickChange).toBeFalsy(); }); }); it("should handle a folder change", function () { runs(function () { FindInFiles._fileNameChangeHandler(null, fullTestPath("css"), fullTestPath("newcss")); }); waitsFor(function () { return gotChange; }, "model change event"); runs(function () { expectUnchangedExcept([fullTestPath("css/foo.css"), fullTestPath("newcss/foo.css")]); expect(FindInFiles.searchModel.results[fullTestPath("css/foo.css")]).toBeUndefined(); expect(FindInFiles.searchModel.results[fullTestPath("newcss/foo.css")]).toEqual(oldResults[fullTestPath("css/foo.css")]); expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 3, matches: 14}); expect(wasQuickChange).toBeFalsy(); }); }); }); describe("when in-memory document changes", function () { it("should update the results when a matching line is added, updating line numbers and adding the match", function () { runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: fullTestPath("foo.html") })); }); runs(function () { var doc = DocumentManager.getOpenDocumentForPath(fullTestPath("foo.html")), i; expect(doc).toBeTruthy(); // Insert another 
line containing "foo" immediately above the second "foo" match. doc.replaceRange("this is a foo instance\n", {line: 5, ch: 0}); // This should update synchronously. expect(gotChange).toBe(true); var oldFileResults = oldResults[fullTestPath("foo.html")], newFileResults = FindInFiles.searchModel.results[fullTestPath("foo.html")]; // First match should be unchanged. expect(newFileResults.matches[0]).toEqual(oldFileResults.matches[0]); // Next match should be the new match. We just check the offsets here, not everything in the match record. expect(newFileResults.matches[1].start).toEqual({line: 5, ch: 10}); expect(newFileResults.matches[1].end).toEqual({line: 5, ch: 13}); // Rest of the matches should have had their lines adjusted. for (i = 2; i < newFileResults.matches.length; i++) { var newMatch = newFileResults.matches[i], oldMatch = oldFileResults.matches[i - 1]; expect(newMatch.start).toEqual({line: oldMatch.start.line + 1, ch: oldMatch.start.ch}); expect(newMatch.end).toEqual({line: oldMatch.end.line + 1, ch: oldMatch.end.ch}); } // There should be one new match. expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 3, matches: 15}); // Make sure the model is adding the flag that will make the view debounce changes. expect(wasQuickChange).toBeTruthy(); }); }); it("should update the results when a matching line is deleted, updating line numbers and removing the match", function () { runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: fullTestPath("foo.html") })); }); runs(function () { var doc = DocumentManager.getOpenDocumentForPath(fullTestPath("foo.html")), i; expect(doc).toBeTruthy(); // Remove the second "foo" match. doc.replaceRange("", {line: 5, ch: 0}, {line: 6, ch: 0}); // This should update synchronously. expect(gotChange).toBe(true); var oldFileResults = oldResults[fullTestPath("foo.html")], newFileResults = FindInFiles.searchModel.results[fullTestPath("foo.html")]; // First match should be unchanged. expect(newFileResults.matches[0]).toEqual(oldFileResults.matches[0]); // Second match should be deleted. The rest of the matches should have their lines adjusted. for (i = 1; i < newFileResults.matches.length; i++) { var newMatch = newFileResults.matches[i], oldMatch = oldFileResults.matches[i + 1]; expect(newMatch.start).toEqual({line: oldMatch.start.line - 1, ch: oldMatch.start.ch}); expect(newMatch.end).toEqual({line: oldMatch.end.line - 1, ch: oldMatch.end.ch}); } // There should be one fewer match. expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 3, matches: 13}); // Make sure the model is adding the flag that will make the view debounce changes. expect(wasQuickChange).toBeTruthy(); }); }); it("should replace matches in a portion of the document that was edited to include a new match", function () { runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: fullTestPath("foo.html") })); }); runs(function () { var doc = DocumentManager.getOpenDocumentForPath(fullTestPath("foo.html")), i; expect(doc).toBeTruthy(); // Replace the second and third foo matches (on two adjacent lines) with a single foo match on a single line. doc.replaceRange("this is a new foo match\n", {line: 5, ch: 0}, {line: 7, ch: 0}); // This should update synchronously. expect(gotChange).toBe(true); var oldFileResults = oldResults[fullTestPath("foo.html")], newFileResults = FindInFiles.searchModel.results[fullTestPath("foo.html")]; // First match should be unchanged. 
expect(newFileResults.matches[0]).toEqual(oldFileResults.matches[0]); // Second match should be changed to reflect the new position. expect(newFileResults.matches[1].start).toEqual({line: 5, ch: 14}); expect(newFileResults.matches[1].end).toEqual({line: 5, ch: 17}); // Third match should be deleted. The rest of the matches should have their lines adjusted. for (i = 2; i < newFileResults.matches.length; i++) { var newMatch = newFileResults.matches[i], oldMatch = oldFileResults.matches[i + 1]; expect(newMatch.start).toEqual({line: oldMatch.start.line - 1, ch: oldMatch.start.ch}); expect(newMatch.end).toEqual({line: oldMatch.end.line - 1, ch: oldMatch.end.ch}); } // There should be one fewer match. expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 3, matches: 13}); // Make sure the model is adding the flag that will make the view debounce changes. expect(wasQuickChange).toBeTruthy(); }); }); it("should completely remove the document from the results list if all matches in the document are deleted", function () { runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: fullTestPath("foo.html") })); }); runs(function () { var doc = DocumentManager.getOpenDocumentForPath(fullTestPath("foo.html")); expect(doc).toBeTruthy(); // Replace all matches and check that the entire file was removed from the results list. doc.replaceRange("this will not match", {line: 4, ch: 0}, {line: 18, ch: 0}); // This should update synchronously. expect(gotChange).toBe(true); expect(FindInFiles.searchModel.results[fullTestPath("foo.html")]).toBeUndefined(); // There should be one fewer file and the matches for that file should be gone. expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 2, matches: 7}); // Make sure the model is adding the flag that will make the view debounce changes. expect(wasQuickChange).toBeTruthy(); }); }); }); // Unfortunately, we can't easily mock file changes, so we just do them in a copy of the project. // This set of tests isn't as thorough as it could be, because it's difficult to perform file // ops that will exercise all possible scenarios of change events (e.g. change events with // both added and removed files), and conversely it's difficult to mock all the filesystem stuff // without doing a bunch of work. So this is really just a set of basic sanity tests to make // sure that stuff being refactored between the change handler and the model doesn't break // basic update functionality. describe("when on-disk file or folder changes", function () { it("should add matches for a new file", function () { var newFilePath; runs(function () { newFilePath = fullTestPath("newfoo.html"); expect(FindInFiles.searchModel.results[newFilePath]).toBeFalsy(); waitsForDone(promisify(FileSystem.getFileForPath(newFilePath), "write", "this is a new foo match\n"), "add new file"); }); waitsFor(function () { return gotChange; }, "model change event"); runs(function () { var newFileResults = FindInFiles.searchModel.results[newFilePath]; expect(newFileResults).toBeTruthy(); expect(newFileResults.matches.length).toBe(1); expect(newFileResults.matches[0].start).toEqual({line: 0, ch: 14}); expect(newFileResults.matches[0].end).toEqual({line: 0, ch: 17}); // There should be one new file and match. 
expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 4, matches: 15}); }); }); it("should remove matches for a deleted file", function () { runs(function () { expect(FindInFiles.searchModel.results[fullTestPath("foo.html")]).toBeTruthy(); waitsForDone(promisify(FileSystem.getFileForPath(fullTestPath("foo.html")), "unlink"), "delete file"); }); waitsFor(function () { return gotChange; }, "model change event"); runs(function () { expect(FindInFiles.searchModel.results[fullTestPath("foo.html")]).toBeFalsy(); // There should be one fewer file and the matches should be removed. expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 2, matches: 7}); }); }); it("should remove matches for a deleted folder", function () { runs(function () { expect(FindInFiles.searchModel.results[fullTestPath("css/foo.css")]).toBeTruthy(); waitsForDone(promisify(FileSystem.getFileForPath(fullTestPath("css")), "unlink"), "delete folder"); }); waitsFor(function () { return gotChange; }, "model change event"); runs(function () { expect(FindInFiles.searchModel.results[fullTestPath("css/foo.css")]).toBeFalsy(); // There should be one fewer file and the matches should be removed. expect(FindInFiles.searchModel.countFilesMatches()).toEqual({files: 2, matches: 11}); }); }); }); }); describe("Replace", function () { function expectProjectToMatchKnownGood(kgFolder, lineEndings, filesToSkip) { runs(function () { var testRootPath = ProjectManager.getProjectRoot().fullPath, kgRootPath = SpecRunnerUtils.getTestPath("/spec/FindReplace-known-goods/" + kgFolder + "/"); function compareKnownGoodToTestFile(kgContents, kgFilePath) { var testFilePath = testRootPath + kgFilePath.slice(kgRootPath.length); if (!filesToSkip || filesToSkip.indexOf(testFilePath) === -1) { return promisify(FileSystem.getFileForPath(testFilePath), "read").then(function (testContents) { if (lineEndings) { kgContents = FileUtils.translateLineEndings(kgContents, lineEndings); } expect(testContents).toEqual(kgContents); }); } } waitsForDone(visitAndProcessFiles(kgRootPath, compareKnownGoodToTestFile), "project comparison done"); }); } // Does a standard test for files on disk: search, replace, and check that files on disk match. // Options: // knownGoodFolder: name of folder containing known goods to match to project files on disk // lineEndings: optional, one of the FileUtils.LINE_ENDINGS_* constants // - if specified, files on disk are expected to have these line endings // uncheckMatches: optional array of {file: string, index: number} items to uncheck; if // index unspecified, will uncheck all matches in file function doBasicTest(options) { doSearch(options); runs(function () { if (options.uncheckMatches) { options.uncheckMatches.forEach(function (matchToUncheck) { var matches = searchResults[testPath + matchToUncheck.file].matches; if (matchToUncheck.index) { matches[matchToUncheck.index].isChecked = false; } else { matches.forEach(function (match) { match.isChecked = false; }); } }); } waitsForDone(doReplace(options), "finish replacement"); }); expectProjectToMatchKnownGood(options.knownGoodFolder, options.lineEndings); } // Like doBasicTest, but expects some files to have specific errors. 
// Options: same as doBasicTest, plus: // test: optional function (which must contain one or more runs blocks) to run between // search and replace // errors: array of errors expected to occur (in the same format as performReplacement() returns) function doTestWithErrors(options) { var done = false; doSearch(options); if (options.test) { // The test function *must* contain one or more runs blocks. options.test(); } runs(function () { doReplace(options) .then(function () { expect("should fail due to error").toBe(true); done = true; }, function (errors) { expect(errors).toEqual(options.errors); done = true; }); }); waitsFor(function () { return done; }, 1000, "finish replacement"); expectProjectToMatchKnownGood(options.knownGoodFolder, options.lineEndings); } function expectInMemoryFiles(options) { runs(function () { waitsForDone(Async.doInParallel(options.inMemoryFiles, function (filePath) { var fullPath; // If this is a full file path (as would be the case for an external file), handle it specially. if (typeof filePath === "object" && filePath.fullPath) { fullPath = filePath.fullPath; filePath = "/" + FileUtils.getBaseName(fullPath); } else { fullPath = testPath + filePath; } // Check that the document open in memory was changed and matches the expected replaced version of that file. var doc = DocumentManager.getOpenDocumentForPath(fullPath); expect(doc).toBeTruthy(); expect(doc.isDirty).toBe(true); var kgPath = SpecRunnerUtils.getTestPath("/spec/FindReplace-known-goods/" + options.inMemoryKGFolder + filePath), kgFile = FileSystem.getFileForPath(kgPath); return promisify(kgFile, "read").then(function (contents) { expect(doc.getText(true)).toEqual(contents); }); }), "check in memory file contents"); }); } // Like doBasicTest, but expects one or more files to be open in memory and the replacements to happen there. // Options: same as doBasicTest, plus: // inMemoryFiles: array of project-relative paths (each starting with "/") to files that should be open in memory // inMemoryKGFolder: folder containing known goods to compare each of the inMemoryFiles to function doInMemoryTest(options) { // Like the basic test, we expect everything on disk to match the kgFolder (which means the file open in memory // should *not* have changed on disk yet). 
doBasicTest(options); expectInMemoryFiles(options); } afterEach(function () { runs(function () { waitsForDone(CommandManager.execute(Commands.FILE_CLOSE_ALL, { _forceClose: true }), "close all files"); }); }); describe("Engine", function () { it("should replace all instances of a simple string in a project on disk case-insensitively", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive" }); }); it("should replace all instances of a simple string in a project on disk case-sensitively", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "foo", isCaseSensitive: true}, numMatches: 9, replaceText: "bar", knownGoodFolder: "simple-case-sensitive" }); }); it("should replace all instances of a regexp in a project on disk case-insensitively with a simple replace string", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "\\b[a-z]{3}\\b", isRegexp: true}, numMatches: 33, replaceText: "CHANGED", knownGoodFolder: "regexp-case-insensitive" }); }); it("should replace all instances of a regexp that spans multiple lines in a project on disk", function () { openTestProjectCopy(defaultSourcePath); // This query should find each rule in the CSS file (but not in the JS file since there's more than one line // between each pair of braces). doBasicTest({ queryInfo: {query: "\\{\\n[^\\n]*\\n\\}", isRegexp: true}, numMatches: 4, replaceText: "CHANGED", knownGoodFolder: "regexp-replace-multiline" }); }); it("should replace all instances of a regexp that spans multiple lines in a project in memory", function () { openTestProjectCopy(defaultSourcePath); // This query should find each rule in the CSS file (but not in the JS file since there's more than one line // between each pair of braces). doInMemoryTest({ queryInfo: {query: "\\{\\n[^\\n]*\\n\\}", isRegexp: true}, numMatches: 4, replaceText: "CHANGED", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "regexp-replace-multiline" }); }); it("should replace all instances of a regexp that spans multiple lines in a project on disk when the last line is a partial match", function () { openTestProjectCopy(defaultSourcePath); // This query should match from the open brace through to (and including) the first colon of each rule in the // CSS file. doBasicTest({ queryInfo: {query: "\\{\\n[^:]+:", isRegexp: true}, numMatches: 4, replaceText: "CHANGED", knownGoodFolder: "regexp-replace-multiline-partial" }); }); it("should replace all instances of a regexp that spans multiple lines in a project in memory when the last line is a partial match", function () { openTestProjectCopy(defaultSourcePath); // This query should match from the open brace through to (and including) the first colon of each rule in the // CSS file. 
doInMemoryTest({ queryInfo: {query: "\\{\\n[^:]+:", isRegexp: true}, numMatches: 4, replaceText: "CHANGED", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "regexp-replace-multiline-partial" }); }); it("should replace all instances of a regexp in a project on disk case-sensitively with a simple replace string", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "\\b[a-z]{3}\\b", isRegexp: true, isCaseSensitive: true}, numMatches: 25, replaceText: "CHANGED", knownGoodFolder: "regexp-case-sensitive" }); }); it("should replace instances of a regexp with a $-substitution on disk", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "\\b([a-z]{3})\\b", isRegexp: true}, numMatches: 33, replaceText: "[$1]", knownGoodFolder: "regexp-dollar-replace" }); }); it("should replace instances of a regexp with a $-substitution in in-memory files", function () { // This test case is necessary because the in-memory case goes through a separate code path before it deals with // the replace text. openTestProjectCopy(defaultSourcePath); doInMemoryTest({ queryInfo: {query: "\\b([a-z]{3})\\b", isRegexp: true}, numMatches: 33, replaceText: "[$1]", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "regexp-dollar-replace" }); }); it("should replace instances of regexp with 0-length matches on disk", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "^", isRegexp: true}, numMatches: 55, replaceText: "CHANGED", knownGoodFolder: "regexp-zero-length" }); }); it("should replace instances of regexp with 0-length matches in memory", function () { openTestProjectCopy(defaultSourcePath); doInMemoryTest({ queryInfo: {query: "^", isRegexp: true}, numMatches: 55, replaceText: "CHANGED", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "regexp-zero-length" }); }); it("should replace instances of a string in a project respecting CRLF line endings", function () { openTestProjectCopy(defaultSourcePath, FileUtils.LINE_ENDINGS_CRLF); doBasicTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive", lineEndings: FileUtils.LINE_ENDINGS_CRLF }); }); it("should replace instances of a string in a project respecting LF line endings", function () { openTestProjectCopy(defaultSourcePath, FileUtils.LINE_ENDINGS_LF); doBasicTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive", lineEndings: FileUtils.LINE_ENDINGS_LF }); }); it("should not replace unchecked matches on disk", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "foo"}, numMatches: 14, uncheckMatches: [{file: "/css/foo.css"}], replaceText: "bar", knownGoodFolder: "simple-case-insensitive-except-foo.css" }); }); it("should do all in-memory replacements synchronously, so user can't accidentally edit document after start of replace process", function () { openTestProjectCopy(defaultSourcePath); // Open two of the documents we want to replace in memory. 
runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/css/foo.css" }), "opening document"); }); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/foo.js" }), "opening document"); }); // We can't use expectInMemoryFiles(), since this test requires everything to happen fully synchronously // (no file reads) once the replace has started. So we read the files here. var kgFileContents = {}; runs(function () { var kgPath = SpecRunnerUtils.getTestPath("/spec/FindReplace-known-goods/simple-case-insensitive"); waitsForDone(visitAndProcessFiles(kgPath, function (contents, fullPath) { // Translate line endings to in-memory document style (always LF) kgFileContents[fullPath.slice(kgPath.length)] = FileUtils.translateLineEndings(contents, FileUtils.LINE_ENDINGS_LF); }), "reading known good"); }); doSearch({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar" }); runs(function () { // Start the replace, but don't wait for it to complete. Since the in-memory replacements should occur // synchronously, the in-memory documents should have already been changed. This means we don't have to // worry about detecting changes in documents once the replace starts. (If the user had changed // the document after the search but before the replace started, we would have already closed the panel, // preventing the user from doing a replace.) var promise = FindInFiles.doReplace(searchResults, "bar"); // Check the in-memory contents against the known goods. ["/css/foo.css", "/foo.js"].forEach(function (filename) { var fullPath = testPath + filename, doc = DocumentManager.getOpenDocumentForPath(fullPath); expect(doc).toBeTruthy(); expect(doc.isDirty).toBe(true); expect(doc.getText()).toEqual(kgFileContents[filename]); }); // Finish the replace operation, which should go ahead and do the file on disk. waitsForDone(promise); }); runs(function () { // Now the file on disk should have been replaced too. waitsForDone(promisify(FileSystem.getFileForPath(testPath + "/foo.html"), "read").then(function (contents) { expect(FileUtils.translateLineEndings(contents, FileUtils.LINE_ENDINGS_LF)).toEqual(kgFileContents["/foo.html"]); }), "checking known good"); }); }); it("should return an error and not do the replacement in files that have changed on disk since the search", function () { openTestProjectCopy(defaultSourcePath); doTestWithErrors({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "changed-file", test: function () { // Wait for one second to make sure that the changed file gets an updated timestamp. // TODO: this seems like a FileSystem issue - we don't get timestamp changes with a resolution // of less than one second. waits(1000); runs(function () { // Clone the results so we don't use the version that's auto-updated by FindInFiles when we modify the file // on disk. This case might not usually come up in the real UI if we always guarantee that the results list will // be auto-updated, but we want to make sure there's no edge case where we missed an update and still clobber the // file on disk anyway. 
searchResults = _.cloneDeep(searchResults); waitsForDone(promisify(FileSystem.getFileForPath(testPath + "/css/foo.css"), "write", "/* changed content */"), "modify file"); }); }, errors: [{item: testPath + "/css/foo.css", error: FindUtils.ERROR_FILE_CHANGED}] }); }); it("should return an error if a write fails", function () { openTestProjectCopy(defaultSourcePath); // Return a fake error when we try to write to the CSS file. (Note that this is spying on the test window's File module.) var writeSpy = spyOn(File.prototype, "write").andCallFake(function (data, options, callback) { if (typeof options === "function") { callback = options; } else { callback = callback || function () {}; } if (this.fullPath === testPath + "/css/foo.css") { callback(FileSystemError.NOT_WRITABLE); } else { return writeSpy.originalValue.apply(this, arguments); } }); doTestWithErrors({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive-except-foo.css", errors: [{item: testPath + "/css/foo.css", error: FileSystemError.NOT_WRITABLE}] }); }); it("should return an error if a match timestamp doesn't match an in-memory document timestamp", function () { openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/css/foo.css" }), "opening document"); }); doTestWithErrors({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive-except-foo.css", test: function () { runs(function () { // Clone the results so we don't use the version that's auto-updated by FindInFiles when we modify the file // on disk. This case might not usually come up in the real UI if we always guarantee that the results list will // be auto-updated, but we want to make sure there's no edge case where we missed an update and still clobber the // file on disk anyway. 
searchResults = _.cloneDeep(searchResults); var oldTimestamp = searchResults[testPath + "/css/foo.css"].timestamp; searchResults[testPath + "/css/foo.css"].timestamp = new Date(oldTimestamp.getTime() - 5000); }); }, errors: [{item: testPath + "/css/foo.css", error: FindUtils.ERROR_FILE_CHANGED}] }); }); it("should do the replacement in memory for a file open in an Editor in the working set", function () { openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, {fullPath: testPath + "/css/foo.css"}), "add file to working set"); }); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive-except-foo.css", inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "simple-case-insensitive" }); }); it("should do the search/replace in the current document content for a dirty in-memory document", function () { openTestProjectCopy(defaultSourcePath); var options = { queryInfo: {query: "foo"}, numMatches: 15, replaceText: "bar", inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "simple-case-insensitive-modified" }; runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, {fullPath: testPath + "/css/foo.css"}), "add file to working set"); }); runs(function () { var doc = DocumentManager.getOpenDocumentForPath(testPath + "/css/foo.css"); expect(doc).toBeTruthy(); doc.replaceRange("/* added a foo line */\n", {line: 0, ch: 0}); }); doSearch(options); runs(function () { waitsForDone(doReplace(options), "replace done"); }); expectInMemoryFiles(options); expectProjectToMatchKnownGood("simple-case-insensitive-modified", null, [testPath + "/css/foo.css"]); }); it("should do the replacement in memory for a file open in an Editor that's not in the working set", function () { openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.FILE_OPEN, {fullPath: testPath + "/css/foo.css"}), "open file"); }); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive-except-foo.css", inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "simple-case-insensitive" }); }); it("should do the replacement in memory for a file that's in the working set but not yet open in an editor", function () { openTestProjectCopy(defaultSourcePath); runs(function () { MainViewManager.addToWorkingSet(MainViewManager.ACTIVE_PANE, FileSystem.getFileForPath(testPath + "/css/foo.css")); }); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive-except-foo.css", inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "simple-case-insensitive" }); }); it("should open the document in an editor and do the replacement there if the document is open but not in an Editor", function () { var doc, openFilePath; openTestProjectCopy(defaultSourcePath); runs(function () { openFilePath = testPath + "/css/foo.css"; waitsForDone(DocumentManager.getDocumentForPath(openFilePath).done(function (d) { doc = d; doc.addRef(); }), "get document"); }); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "simple-case-insensitive-except-foo.css", inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "simple-case-insensitive" }); runs(function () { var workingSet = MainViewManager.getWorkingSet(MainViewManager.ALL_PANES); expect(workingSet.some(function (file) { return file.fullPath === 
openFilePath; })).toBe(true); doc.releaseRef(); }); }); it("should open files and do all replacements in memory if forceFilesOpen is true", function () { openTestProjectCopy(defaultSourcePath); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "simple-case-insensitive" }); }); it("should not perform unchecked matches in memory", function () { openTestProjectCopy(defaultSourcePath); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, uncheckMatches: [{file: "/css/foo.css", index: 1}, {file: "/foo.html", index: 3}], replaceText: "bar", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "simple-case-insensitive-unchecked" }); }); it("should not perform unchecked matches on disk", function () { openTestProjectCopy(defaultSourcePath); doBasicTest({ queryInfo: {query: "foo"}, numMatches: 14, uncheckMatches: [{file: "/css/foo.css", index: 1}, {file: "/foo.html", index: 3}], replaceText: "bar", knownGoodFolder: "simple-case-insensitive-unchecked" }); }); it("should select the first modified file in the working set if replacements are done in memory and current editor wasn't affected", function () { openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, {fullPath: testPath + "/bar.txt"}), "open file"); }); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "simple-case-insensitive" }); runs(function () { var expectedFile = testPath + "/foo.html"; expect(DocumentManager.getCurrentDocument().file.fullPath).toBe(expectedFile); expect(MainViewManager.findInWorkingSet(MainViewManager.ACTIVE_PANE, expectedFile)).not.toBe(-1); }); }); it("should select the first modified file in the working set if replacements are done in memory and no editor was open", function () { openTestProjectCopy(defaultSourcePath); var testFiles = ["/css/foo.css", "/foo.html", "/foo.js"]; doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, replaceText: "bar", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: testFiles, inMemoryKGFolder: "simple-case-insensitive" }); runs(function () { // since nothing was opened prior to doing the // replacements then the first file modified will be opened. // This may not be the first item in the array above // since the files are sorted differently in performReplacements // and the replace is performed asynchronously. 
// So, just ensure that *something* was opened expect(DocumentManager.getCurrentDocument().file.fullPath).toBeTruthy(); testFiles.forEach(function (relPath) { expect(MainViewManager.findInWorkingSet(MainViewManager.ACTIVE_PANE, testPath + relPath)).not.toBe(-1); }); }); }); it("should select the first modified file in the working set if replacements are done in memory and there were no matches checked for current editor", function () { openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, {fullPath: testPath + "/css/foo.css"}), "open file"); }); doInMemoryTest({ queryInfo: {query: "foo"}, numMatches: 14, uncheckMatches: [{file: "/css/foo.css"}], replaceText: "bar", knownGoodFolder: "unchanged", forceFilesOpen: true, inMemoryFiles: ["/foo.html", "/foo.js"], inMemoryKGFolder: "simple-case-insensitive-except-foo.css" }); runs(function () { expect(DocumentManager.getCurrentDocument().file.fullPath).toEqual(testPath + "/foo.html"); }); }); }); describe("UI", function () { function executeReplace(findText, replaceText, fromKeyboard) { runs(function () { FindInFiles._searchDone = false; FindInFiles._replaceDone = false; $("#find-what").val(findText).trigger("input"); $("#replace-with").val(replaceText).trigger("input"); if (fromKeyboard) { SpecRunnerUtils.simulateKeyEvent(KeyEvent.DOM_VK_RETURN, "keydown", $("#replace-with").get(0)); } else { $("#replace-all").click(); } }); } function showSearchResults(findText, replaceText, fromKeyboard) { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); executeReplace(findText, replaceText, fromKeyboard); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); } afterEach(function () { closeSearchBar(); }); describe("Replace in Files Bar", function () { it("should only show a Replace All button", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { expect($("#replace-yes").length).toBe(0); expect($("#replace-all").length).toBe(1); }); }); it("should disable the Replace button if query is empty", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { $("#find-what").val("").trigger("input"); expect($("#replace-all").is(":disabled")).toBe(true); }); }); it("should enable the Replace button if the query is a non-empty string", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { $("#find-what").val("my query").trigger("input"); expect($("#replace-all").is(":disabled")).toBe(false); }); }); it("should disable the Replace button if query is an invalid regexp", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { $("#find-regexp").click(); $("#find-what").val("[invalid").trigger("input"); expect($("#replace-all").is(":disabled")).toBe(true); }); }); it("should enable the Replace button if query is a valid regexp", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { $("#find-regexp").click(); $("#find-what").val("[valid]").trigger("input"); expect($("#replace-all").is(":disabled")).toBe(false); }); }); it("should start with focus in Find, and set focus to the Replace field when the user hits enter in the Find field", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { // For some reason using $().is(":focus") here is flaky. 
expect(testWindow.document.activeElement).toBe($("#find-what").get(0)); SpecRunnerUtils.simulateKeyEvent(KeyEvent.DOM_VK_RETURN, "keydown", $("#find-what").get(0)); expect(testWindow.document.activeElement).toBe($("#replace-with").get(0)); }); }); }); describe("Full workflow", function () { it("should prepopulate the find bar with selected text", function () { var doc, editor; openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/foo.html" }), "open file"); }); runs(function () { doc = DocumentManager.getOpenDocumentForPath(testPath + "/foo.html"); expect(doc).toBeTruthy(); MainViewManager._edit(MainViewManager.ACTIVE_PANE, doc); editor = doc._masterEditor; expect(editor).toBeTruthy(); editor.setSelection({line: 4, ch: 7}, {line: 4, ch: 10}); }); openSearchBar(null); runs(function () { expect($("#find-what").val()).toBe("Foo"); }); waitsForDone(CommandManager.execute(Commands.FILE_CLOSE_ALL), "closing all files"); }); it("should prepopulate the find bar with only first line of selected text", function () { var doc, editor; openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/foo.html" }), "open file"); }); runs(function () { doc = DocumentManager.getOpenDocumentForPath(testPath + "/foo.html"); expect(doc).toBeTruthy(); MainViewManager._edit(MainViewManager.ACTIVE_PANE, doc); editor = doc._masterEditor; expect(editor).toBeTruthy(); editor.setSelection({line: 4, ch: 7}, {line: 6, ch: 10}); }); openSearchBar(null); runs(function () { expect($("#find-what").val()).toBe("Foo</title>"); }); waitsForDone(CommandManager.execute(Commands.FILE_CLOSE_ALL), "closing all files"); }); it("should show results from the search with all checkboxes checked", function () { showSearchResults("foo", "bar"); runs(function () { expect($("#find-in-files-results").length).toBe(1); expect($("#find-in-files-results .check-one").length).toBe(14); expect($("#find-in-files-results .check-one:checked").length).toBe(14); }); }); it("should do a simple search/replace all from find bar, opening results in memory, when user clicks on Replace... 
button", function () { showSearchResults("foo", "bar"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "simple-case-insensitive" }); }); it("should do a simple search/replace all from find bar, opening results in memory, when user hits Enter in Replace field", function () { showSearchResults("foo", "bar"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "simple-case-insensitive" }); }); it("should do a search in folder, replace all from find bar", function () { openTestProjectCopy(defaultSourcePath); var dirEntry = FileSystem.getDirectoryForPath(testPath + "/css/"); openSearchBar(dirEntry, true); executeReplace("foo", "bar", true); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "simple-case-insensitive-only-foo.css" }); }); it("should do a search in file, replace all from find bar", function () { openTestProjectCopy(defaultSourcePath); var fileEntry = FileSystem.getFileForPath(testPath + "/css/foo.css"); openSearchBar(fileEntry, true); executeReplace("foo", "bar", true); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: ["/css/foo.css"], inMemoryKGFolder: "simple-case-insensitive-only-foo.css" }); }); it("should do a regexp search/replace from find bar", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { $("#find-regexp").click(); }); executeReplace("\\b([a-z]{3})\\b", "[$1]", true); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: "regexp-dollar-replace" }); }); it("should do a case-sensitive search/replace from find bar", function () { openTestProjectCopy(defaultSourcePath); openSearchBar(null, true); runs(function () { $("#find-case-sensitive").click(); }); executeReplace("foo", "bar", true); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: ["/css/foo.css", "/foo.html", "/foo.js"], inMemoryKGFolder: 
"simple-case-sensitive" }); }); it("should warn and do changes on disk if there are changes in >20 files", function () { openTestProjectCopy(SpecRunnerUtils.getTestPath("/spec/FindReplace-test-files-large")); openSearchBar(null, true); executeReplace("foo", "bar"); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should cause the dialog to appear runs(function () { $(".replace-checked").click(); }); runs(function () { expect(FindInFiles._replaceDone).toBeFalsy(); }); var $okButton; waitsFor(function () { $okButton = $(".dialog-button[data-button-id='ok']"); return !!$okButton.length; }, "dialog appearing"); runs(function () { expect($okButton.length).toBe(1); expect($okButton.text()).toBe(Strings.BUTTON_REPLACE_WITHOUT_UNDO); $okButton.click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectProjectToMatchKnownGood("simple-case-insensitive-large"); }); it("should not do changes on disk if Cancel is clicked in 'too many files' dialog", function () { spyOn(FindInFiles, "doReplace").andCallThrough(); openTestProjectCopy(SpecRunnerUtils.getTestPath("/spec/FindReplace-test-files-large")); openSearchBar(null, true); executeReplace("foo", "bar"); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should cause the dialog to appear runs(function () { $(".replace-checked").click(); }); runs(function () { expect(FindInFiles._replaceDone).toBeFalsy(); }); var $cancelButton; waitsFor(function () { $cancelButton = $(".dialog-button[data-button-id='cancel']"); return !!$cancelButton.length; }); runs(function () { expect($cancelButton.length).toBe(1); $cancelButton.click(); }); waitsFor(function () { return $(".dialog-button[data-button-id='cancel']").length === 0; }, "dialog dismissed"); runs(function () { expect(FindInFiles.doReplace).not.toHaveBeenCalled(); // Panel should be left open. expect($("#find-in-files-results").is(":visible")).toBeTruthy(); }); }); it("should do single-file Replace All in an open file in the project", function () { openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/foo.js" }), "open file"); }); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_REPLACE), "open single-file replace bar"); }); waitsFor(function () { return $(".modal-bar").length === 1; }, "search bar open"); executeReplace("foo", "bar"); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: ["/foo.js"], inMemoryKGFolder: "simple-case-insensitive" }); }); it("should do single-file Replace All in a non-project file", function () { // Open an empty project. 
var blankProject = SpecRunnerUtils.getTempDirectory() + "/blank-project", externalFilePath = defaultSourcePath + "/foo.js"; runs(function () { var dirEntry = FileSystem.getDirectoryForPath(blankProject); waitsForDone(promisify(dirEntry, "create")); }); SpecRunnerUtils.loadProjectInTestWindow(blankProject); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: externalFilePath }), "open external file"); }); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_REPLACE), "open single-file replace bar"); }); waitsFor(function () { return $(".modal-bar").length === 1; }, "search bar open"); executeReplace("foo", "bar"); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); // Click the "Replace" button in the search panel - this should kick off the replace runs(function () { $(".replace-checked").click(); }); waitsFor(function () { return FindInFiles._replaceDone; }, "replace finished"); expectInMemoryFiles({ inMemoryFiles: [{fullPath: externalFilePath}], // pass a full file path since this is an external file inMemoryKGFolder: "simple-case-insensitive" }); }); it("should show an error dialog if errors occurred during the replacement", function () { showSearchResults("foo", "bar"); runs(function () { spyOn(FindInFiles, "doReplace").andCallFake(function () { return new $.Deferred().reject([ {item: testPath + "/css/foo.css", error: FindUtils.ERROR_FILE_CHANGED}, {item: testPath + "/foo.html", error: FileSystemError.NOT_WRITABLE} ]); }); }); runs(function () { // This will call our mock doReplace $(".replace-checked").click(); }); var $dlg; waitsFor(function () { $dlg = $(".error-dialog"); return !!$dlg.length; }, "dialog appearing"); runs(function () { expect($dlg.length).toBe(1); // Both files should be mentioned in the dialog. var text = $dlg.find(".dialog-message").text(); // Have to check this in a funny way because breakableUrl() adds a special character after the slash. expect(text.match(/css\/.*foo.css/)).not.toBe(-1); expect(text.indexOf(StringUtils.breakableUrl("foo.html"))).not.toBe(-1); $dlg.find(".dialog-button[data-button-id='ok']").click(); expect($(".error-dialog").length).toBe(0); }); }); }); // TODO: these could be split out into unit tests, but would need to be able to instantiate // a SearchResultsView in the test runner window. describe("Checkbox interactions", function () { it("should uncheck all checkboxes and update model when Check All is clicked while checked", function () { showSearchResults("foo", "bar"); runs(function () { expect($(".check-all").is(":checked")).toBeTruthy(); $(".check-all").click(); expect($(".check-all").is(":checked")).toBeFalsy(); expect($(".check-one:checked").length).toBe(0); expect(_.find(FindInFiles.searchModel.results, function (result) { return _.find(result.matches, function (match) { return match.isChecked; }); })).toBeFalsy(); }); }); it("should uncheck one checkbox and update model, unchecking the Check All checkbox", function () { showSearchResults("foo", "bar"); runs(function () { $(".check-one").eq(1).click(); expect($(".check-one").eq(1).is(":checked")).toBeFalsy(); expect($(".check-all").is(":checked")).toBeFalsy(); // In the sorting, this item should be the second match in the first file, which is foo.html var uncheckedMatch = FindInFiles.searchModel.results[testPath + "/foo.html"].matches[1]; expect(uncheckedMatch.isChecked).toBe(false); // Check that all items in the model besides the unchecked one to be checked. 
expect(_.every(FindInFiles.searchModel.results, function (result) { return _.every(result.matches, function (match) { if (match === uncheckedMatch) { // This one is already expected to be unchecked. return true; } return match.isChecked; }); })).toBeTruthy(); }); }); it("should re-check unchecked checkbox and update model after clicking Check All again", function () { showSearchResults("foo", "bar"); runs(function () { $(".check-one").eq(1).click(); expect($(".check-one").eq(1).is(":checked")).toBeFalsy(); expect($(".check-all").is(":checked")).toBeFalsy(); $(".check-all").click(); expect($(".check-all").is(":checked")).toBeTruthy(); expect($(".check-one:checked").length).toEqual($(".check-one").length); expect(_.every(FindInFiles.searchModel.results, function (result) { return _.every(result.matches, function (match) { return match.isChecked; }); })).toBeTruthy(); }); }); // TODO: checkboxes with paging }); // Untitled documents are covered in the "Search -> Replace All in untitled document" cases above. describe("Panel closure on changes", function () { it("should close the panel and detach listeners if a file is modified on disk", function () { showSearchResults("foo", "bar"); runs(function () { expect($("#find-in-files-results").is(":visible")).toBe(true); waitsForDone(promisify(FileSystem.getFileForPath(testPath + "/foo.html"), "write", "changed content")); }); runs(function () { expect($("#find-in-files-results").is(":visible")).toBe(false); }); }); it("should close the panel if a file is modified in memory", function () { openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/foo.html" }), "open file"); }); openSearchBar(null, true); executeReplace("foo", "bar"); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); runs(function () { expect($("#find-in-files-results").is(":visible")).toBe(true); var doc = DocumentManager.getOpenDocumentForPath(testPath + "/foo.html"); expect(doc).toBeTruthy(); doc.replaceRange("", {line: 0, ch: 0}, {line: 1, ch: 0}); expect($("#find-in-files-results").is(":visible")).toBe(false); }); }); it("should close the panel if a document was open and modified before the search, but then the file was closed and changes dropped", function () { var doc; openTestProjectCopy(defaultSourcePath); runs(function () { waitsForDone(CommandManager.execute(Commands.CMD_ADD_TO_WORKINGSET_AND_OPEN, { fullPath: testPath + "/foo.html" }), "open file"); }); runs(function () { doc = DocumentManager.getOpenDocumentForPath(testPath + "/foo.html"); expect(doc).toBeTruthy(); doc.replaceRange("", {line: 0, ch: 0}, {line: 1, ch: 0}); }); openSearchBar(null, true); executeReplace("foo", "bar"); waitsFor(function () { return FindInFiles._searchDone; }, "search finished"); runs(function () { expect($("#find-in-files-results").is(":visible")).toBe(true); // We have to go through the dialog workflow for closing the file without saving changes, // because the "revert" behavior only happens in that workflow (it doesn't happen if you // do forceClose, since that's only intended as a shortcut for the end of a unit test). 
var closePromise = CommandManager.execute(Commands.FILE_CLOSE, { file: doc.file }), $dontSaveButton = $(".dialog-button[data-button-id='dontsave']"); expect($dontSaveButton.length).toBe(1); $dontSaveButton.click(); waitsForDone(closePromise); }); runs(function () { expect($("#find-in-files-results").is(":visible")).toBe(false); }); }); }); describe("Disclosure Arrows", function () { it("should expand/collapse items when clicked", function () { showSearchResults("foo", "bar"); runs(function () { $(".disclosure-triangle").click(); expect($(".disclosure-triangle").hasClass("expanded")).toBeFalsy(); // Check that all results are hidden expect($(".bottom-panel-table tr[data-file-index=0][data-match-index]:hidden").length).toEqual(7); expect($(".bottom-panel-table tr[data-file-index=1][data-match-index]:hidden").length).toEqual(4); $(".disclosure-triangle").click(); expect($(".disclosure-triangle").hasClass("expanded")).toBeTruthy(); expect($(".bottom-panel-table tr[data-file-index=0][data-match-index]:visible").length).toEqual(7); expect($(".bottom-panel-table tr[data-file-index=1][data-match-index]:visible").length).toEqual(4); }); }); }); }); }); }); });<|fim▁end|>
<|file_name|>burp2.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*- """ .. module:: burpui.misc.backend.burp2 :platform: Unix :synopsis: Burp-UI burp2 backend module. .. moduleauthor:: Ziirish <hi+burpui@ziirish.me> """ import re import os import time import json from collections import OrderedDict from .burp1 import Burp as Burp1 from .interface import BUIbackend from .utils.burp2 import Monitor from .utils.constant import BURP_REVERSE_COUNTERS, BURP_STATUS_FORMAT_V2 from ..parser.burp2 import Parser from ...utils import human_readable as _hr, utc_to_local from ...exceptions import BUIserverException from ..._compat import to_unicode # Some functions are the same as in Burp1 backend class Burp(Burp1): """The :class:`burpui.misc.backend.burp2.Burp` class provides a consistent backend for ``burp-2`` servers. It extends the :class:`burpui.misc.backend.burp1.Burp` class because a few functions can be reused. The rest is just overrided. :param server: ``Burp-UI`` server instance in order to access logger and/or some global settings :type server: :class:`burpui.engines.server.BUIServer` :param conf: Configuration to use :type conf: :class:`burpui.config.BUIConfig` """ # backend version _vers = 2 # cache to store the guessed OS _os_cache = {} def __init__(self, server=None, conf=None): """ :param server: ``Burp-UI`` server instance in order to access logger and/or some global settings :type server: :class:`burpui.engines.server.BUIServer` :param conf: Configuration to use :type conf: :class:`burpui.config.BUIConfig` """ BUIbackend.__init__(self, server, conf) self.monitor = Monitor(self.burpbin, self.burpconfcli, self.app, self.timeout) self.batch_list_supported = self.monitor.batch_list_supported self.parser = Parser(self) self.logger.info(f"burp binary: {self.burpbin}") self.logger.info(f"strip binary: {self.stripbin}") self.logger.info(f"burp conf cli: {self.burpconfcli}") self.logger.info(f"burp conf srv: {self.burpconfsrv}") self.logger.info(f"command timeout: {self.timeout}") self.logger.info(f"tmpdir: {self.tmpdir}") self.logger.info(f"zip64: {self.zip64}") self.logger.info(f"includes: {self.includes}") self.logger.info(f"enforce: {self.enforce}") self.logger.info(f"revoke: {self.revoke}") self.logger.info(f"client version: {self.client_version}") self.logger.info(f"server version: {self.server_version}") @property def client_version(self): return self.monitor.client_version @property def server_version(self): return self.monitor.server_version @staticmethod def _human_st_mode(mode): """Convert the st_mode returned by stat in human readable (ls-like) format """ hur = "" if os.path.stat.S_ISREG(mode): hur = "-" elif os.path.stat.S_ISLNK(mode): hur = "l" elif os.path.stat.S_ISSOCK(mode): hur = "s" elif os.path.stat.S_ISDIR(mode): hur = "d" elif os.path.stat.S_ISBLK(mode): hur = "b" elif os.path.stat.S_ISFIFO(mode): hur = "p" elif os.path.stat.S_ISCHR(mode): hur = "c" else: hur = "-" for who in "USR", "GRP", "OTH": for perm in "R", "W", "X": if mode & getattr(os.path.stat, "S_I" + perm + who): hur += perm.lower() else: hur += "-" return hur def statistics(self, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.statistics`""" return { "alive": self.monitor.alive, "server_version": self.server_version, "client_version": self.client_version, } def status(self, query="c:\n", timeout=None, cache=True, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.status`""" return self.monitor.status(query, timeout, cache) def _get_backup_logs(self, number, client, 
forward=False, deep=False): """See :func:`burpui.misc.backend.interface.BUIbackend.get_backup_logs` """ ret = {} ret2 = {} if not client or not number: return ret query = self.status("c:{0}:b:{1}\n".format(client, number)) if not query: return ret try: logs = query["clients"][0]["backups"][0]["logs"]["list"] except KeyError: self.logger.warning("No logs found") return ret if "backup_stats" in logs: ret = self._parse_backup_stats(number, client, forward) if "backup" in logs and deep: ret2 = self._parse_backup_log(number, client) ret.update(ret2) ret["encrypted"] = False if "files_enc" in ret and ret["files_enc"]["total"] > 0: ret["encrypted"] = True return ret @staticmethod def _do_parse_backup_log(data, client): # tests ordered as the logs order ret = OrderedDict() ret["client_version"] = None ret["protocol"] = 1 ret["is_windows"] = False ret["server_version"] = None if not data: return ret try: log = data["clients"][0]["backups"][0]["logs"]["backup"] except KeyError: # Assume protocol 1 in all cases unless explicitly found Protocol 2 return ret # pre-compile regex since they'll be called on every log line regex = { "client_version": re.compile(r"Client version: (\d+\.\d+\.\d+)$"), "server_version": re.compile( r"WARNING: Client '{}' version '\d+\.\d+\.\d+' does not match server version '(\d+\.\d+\.\d+)'. An upgrade is recommended.$".format( client ) ), "protocol": re.compile(r"Protocol: (\d)$"), "is_windows": re.compile(r"Client is Windows$"), } expressions_list = list(ret.keys()) catching_expressions = ["client_version", "server_version", "protocol"] casting_expressions = { "protocol": int, } def __dummy(val): return val for line in log: expressions = expressions_list for expression in expressions: if expression in catching_expressions: catch = regex[expression].search(line) if catch: cast = casting_expressions.get(expression, __dummy) ret[expression] = cast(catch.group(1)) # don't search this expression twice expressions_list.remove(expression) break else: if expression in regex and regex[expression].search(line): ret[expression] = True # don't search this expression twice expressions_list.remove(expression) break return ret def _parse_backup_log(self, number, client): """The :func:`burpui.misc.backend.burp2.Burp._parse_backup_log` function helps you determine if the backup is protocol 2 or 1 and various useful details. 
<|fim▁hole|> :param client: Client name to work on :type client: str :returns: a dict with some useful details """ data = self.status("c:{0}:b:{1}:l:backup\n".format(client, number)) return self._do_parse_backup_log(data, client) def _do_parse_backup_stats( self, data, result, number, client, forward=False, agent=None ): ret = {} translate = { "time_start": "start", "time_end": "end", "time_taken": "duration", "bytes": "totsize", "bytes_received": "received", "bytes_estimated": "estimated_bytes", "files": "files", "files_encrypted": "files_enc", "directories": "dir", "soft_links": "softlink", "hard_links": "hardlink", "meta_data": "meta", "meta_data_encrypted": "meta_enc", "special_files": "special", "efs_files": "efs", "vss_headers": "vssheader", "vss_headers_encrypted": "vssheader_enc", "vss_footers": "vssfooter", "vss_footers_encrypted": "vssfooter_enc", "total": "total", "grand_total": "total", } counts = { "new": "count", "changed": "changed", "unchanged": "same", "deleted": "deleted", "total": "scanned", "scanned": "scanned", } single = [ "time_start", "time_end", "time_taken", "bytes_received", "bytes_estimated", "bytes", ] if not data: return ret try: back = data["clients"][0]["backups"][0] except KeyError: self.logger.warning("No backup found") return ret if "backup_stats" not in back["logs"]: self.logger.warning("No stats found for backup") return ret stats = None try: stats = json.loads("".join(back["logs"]["backup_stats"])) except: stats = back["logs"]["backup_stats"] if not stats: return ret # server was upgraded but backup comes from an older version if "counters" not in stats: return super(Burp, self)._parse_backup_stats( number, client, forward, stats, agent ) counters = stats["counters"] for counter in counters: name = counter["name"] if name in translate: name = translate[name] if counter["name"] in single: result[name] = counter["count"] else: result[name] = {} for (key, val) in counts.items(): if val in counter: result[name][key] = counter[val] else: result[name][key] = 0 if "start" in result and "end" in result: result["duration"] = result["end"] - result["start"] # convert utc timestamp to local # example: 1468850307 -> 1468857507 result["start"] = utc_to_local(result["start"]) result["end"] = utc_to_local(result["end"]) # Needed for graphs if "received" not in result: result["received"] = 1 return result def _parse_backup_stats(self, number, client, forward=False, agent=None): """The :func:`burpui.misc.backend.burp2.Burp._parse_backup_stats` function is used to parse the burp logs. 
:param number: Backup number to work on :type number: int :param client: Client name to work on :type client: str :param forward: Is the client name needed in later process :type forward: bool :param agent: What server to ask (only in multi-agent mode) :type agent: str :returns: Dict containing the backup log """ backup = {"os": self._guess_os(client), "number": int(number)} if forward: backup["name"] = client query = self.status("c:{0}:b:{1}:l:backup_stats\n".format(client, number)) return self._do_parse_backup_stats( query, backup, number, client, forward, agent ) # TODO: support old clients # NOTE: this should now be partly done since we fallback to the Burp1 code # def _parse_backup_log(self, fh, number, client=None, agent=None): # """ # parse_backup_log parses the log.gz of a given backup and returns a # dict containing different stats used to render the charts in the # reporting view # """ # return {} # def get_clients_report(self, clients, agent=None): def get_counters(self, name=None, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.get_counters`""" ret = {} query = self.status("c:{0}\n".format(name), cache=False) # check the status returned something if not query: return ret try: client = query["clients"][0] except KeyError: self.logger.warning("Client not found") return ret return self._do_get_counters(client) def _do_get_counters(self, data): ret = {} client = data # check the client is currently backing-up if "run_status" not in client or client["run_status"] != "running": return ret backup = None phases = ["working", "finishing"] try: for child in client["children"]: if "action" in child and child["action"] == "backup": backup = child break except KeyError: for back in client["backups"]: if "flags" in back and any(x in back["flags"] for x in phases): backup = back break # check we found a working backup if not backup: return ret # list of single counters (type CNTR_SINGLE_FIELD in cntr.c) single = [ "bytes_estimated", "bytes", "bytes_received", "bytes_sent", "time_start", "time_end", "warnings", "errors", ] # translation table to be compatible with burp1 def translate(cntr): translate_table = {"bytes_estimated": "estimated_bytes"} try: return translate_table[cntr] except KeyError: return cntr for counter in backup.get("counters", {}): name = translate(counter["name"]) if counter["name"] not in single: # Prior burp-2.1.6 some counters are reversed # See https://github.com/grke/burp/commit/adeb3ad68477303991a393fa7cd36bc94ff6b429 if self.server_version and self.server_version < BURP_REVERSE_COUNTERS: ret[name] = [ counter["count"], counter["same"], # reversed counter["changed"], # reversed counter["deleted"], counter["scanned"], ] else: ret[name] = [ counter["count"], counter["changed"], counter["same"], counter["deleted"], counter["scanned"], ] else: ret[name] = counter["count"] if "phase" in backup: ret["phase"] = backup["phase"] else: for phase in phases: if phase in backup.get("flags", []): ret["phase"] = phase break if "bytes" not in ret: ret["bytes"] = 0 if set(["time_start", "estimated_bytes", "bytes"]) <= set(ret.keys()): try: diff = time.time() - int(ret["time_start"]) byteswant = int(ret["estimated_bytes"]) bytesgot = int(ret["bytes"]) bytespersec = bytesgot / diff bytesleft = byteswant - bytesgot ret["speed"] = bytespersec if bytespersec > 0: timeleft = int(bytesleft / bytespersec) ret["timeleft"] = timeleft else: ret["timeleft"] = -1 except: ret["timeleft"] = -1 try: ret["percent"] = round( float(ret["bytes"]) / float(ret["estimated_bytes"]) * 100 
) except (ZeroDivisionError, KeyError): # You know... division by 0 ret["percent"] = 0 return ret def is_backup_running(self, name=None, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.is_backup_running` """ if not name: return False try: query = self.status("c:{0}\n".format(name)) except BUIserverException: return False return self._do_is_backup_running(query) def _do_is_backup_running(self, data): if data: try: return data["clients"][0]["run_status"] in ["running"] except KeyError: pass return False def is_one_backup_running(self, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.is_one_backup_running` """ ret = [] try: clients = self.get_all_clients(last_attempt=False) except BUIserverException: return ret return self._do_is_one_backup_running(clients) def _do_is_one_backup_running(self, data): ret = [] for client in data: if client["state"] in ["running"]: ret.append(client["name"]) return ret def _status_human_readable(self, status): """The label has changed in burp2, we override it to be compatible with burp1's format :param status: The status returned by the burp2 server :type status: str :returns: burp1 status compatible """ if not status: return None if status == "c crashed": return "client crashed" if status == "s crashed": return "server crashed" return status def _get_last_backup(self, name, working=True): """Return the last backup of a given client :param name: Name of the client :type name: str :param working: Also return uncomplete backups :type working: bool :returns: The last backup """ try: clients = self.status("c:{}".format(name)) client = clients["clients"][0] i = 0 while True: ret = client["backups"][i] if not working and "working" in ret["flags"]: i += 1 continue return ret except (KeyError, TypeError, IndexError, BUIserverException): return None def _guess_os(self, name): """Return the OS of the given client based on the magic *os* label :param name: Name of the client :type name: str :returns: The guessed OS of the client :: grep label /etc/burp/clientconfdir/toto label = os: Darwin OS """ ret = "Unknown" if name in self._os_cache: return self._os_cache[name] labels = self.get_client_labels(name) OSES = [] for label in labels: if re.match("os:", label, re.IGNORECASE): _os = label.split(":", 1)[1].strip() if _os not in OSES: OSES.append(_os) if OSES: ret = OSES[-1] else: # more aggressive check last = self._get_last_backup(name, False) if last: try: tree = self.get_tree(name, last["number"]) if tree[0]["name"] != "/": ret = "Windows" else: ret = "Unix/Linux" except (IndexError, KeyError, BUIserverException): pass self._os_cache[name] = ret return ret def get_all_clients(self, agent=None, last_attempt=True): """See :func:`burpui.misc.backend.interface.BUIbackend.get_all_clients` """ ret = [] query = self.status() if not query or "clients" not in query: return ret clients = query["clients"] for client in clients: cli = {} cli["name"] = client["name"] cli["state"] = self._status_human_readable(client["run_status"]) infos = client["backups"] if cli["state"] in ["running"]: cli["last"] = "now" cli["last_attempt"] = "now" elif not infos: cli["last"] = "never" cli["last_attempt"] = "never" else: convert = True infos = infos[0] if self.server_version and self.server_version < BURP_STATUS_FORMAT_V2: cli["last"] = infos["timestamp"] convert = False # only do deep inspection when server >= BURP_STATUS_FORMAT_V2 elif self.deep_inspection: logs = self.get_backup_logs(infos["number"], client["name"]) cli["last"] = logs["start"] else: 
cli["last"] = utc_to_local(infos["timestamp"]) if last_attempt: last_backup = self._get_last_backup(client["name"]) if convert: cli["last_attempt"] = utc_to_local(last_backup["timestamp"]) else: cli["last_attempt"] = last_backup["timestamp"] ret.append(cli) return ret def get_client_status(self, name=None, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.get_client_status`""" ret = {} if not name: return ret query = self.status("c:{0}\n".format(name)) if not query: return ret try: client = query["clients"][0] except (KeyError, IndexError): self.logger.warning("Client not found") return ret return self._do_get_client_status(client) def _do_get_client_status(self, data): ret = {} client = data ret["state"] = self._status_human_readable(client["run_status"]) infos = client["backups"] if ret["state"] in ["running"]: try: ret["phase"] = client["phase"] except KeyError: for child in client.get("children", []): if "action" in child and child["action"] == "backup": ret["phase"] = child["phase"] break counters = self._do_get_counters(client) if "percent" in counters: ret["percent"] = counters["percent"] else: ret["percent"] = 0 ret["last"] = "now" elif not infos: ret["last"] = "never" else: infos = infos[0] ret["last"] = infos["timestamp"] return ret def get_client(self, name=None, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.get_client`""" return self.get_client_filtered(name) def get_client_filtered( self, name=None, limit=-1, page=None, start=None, end=None, agent=None ): """See :func:`burpui.misc.backend.interface.BUIbackend.get_client_filtered`""" ret = [] if not name: return ret query = self.status("c:{0}\n".format(name)) if not query: return ret try: backups = query["clients"][0]["backups"] except (KeyError, IndexError): self.logger.warning("Client not found") return ret for idx, backup in enumerate(backups): # skip the first elements if we are in a page if page and page > 1 and limit > 0: if idx < (page - 1) * limit: continue back = {} # skip running backups since data will be inconsistent if "flags" in backup and "working" in backup["flags"]: continue back["number"] = backup["number"] if "flags" in backup and "deletable" in backup["flags"]: back["deletable"] = True else: back["deletable"] = False back["date"] = backup["timestamp"] # skip backups before "start" if start and backup["timestamp"] < start: continue # skip backups after "end" if end and backup["timestamp"] > end: continue def __get_log(client, bkp, res): log = self.get_backup_logs(bkp["number"], client) try: res["encrypted"] = log["encrypted"] try: res["received"] = log["received"] except KeyError: res["received"] = 0 try: res["size"] = log["totsize"] except KeyError: res["size"] = 0 res["end"] = log["end"] # override date since the timestamp is odd res["date"] = log["start"] except Exception: self.logger.warning("Unable to parse logs") return None return res with_log = __get_log(name, backup, back) if with_log: ret.append(with_log) # stop after "limit" elements if page and page > 1 and limit > 0: if idx >= page * limit: break elif limit > 0 and idx >= limit: break # Here we need to reverse the array so the backups are sorted by num # ASC ret.reverse() return ret def is_backup_deletable(self, name=None, backup=None, agent=None): """Check if a given backup is deletable""" if not name or not backup: return False query = self.status("c:{0}:b:{1}\n".format(name, backup)) if not query: return False return self._do_is_backup_deletable(query) def _do_is_backup_deletable(self, data): query = 
data try: flags = query["clients"][0]["backups"][0]["flags"] return "deletable" in flags except KeyError: return False def _format_tree(self, data, top, level): ret = [] if not data: return ret try: backup = data["clients"][0]["backups"][0] except KeyError: return ret for entry in backup["browse"]["entries"]: data = {} base = None dirn = None if top == "*": base = os.path.basename(entry["name"]) dirn = os.path.dirname(entry["name"]) if entry["name"] == ".": continue else: data["name"] = base or entry["name"] data["mode"] = self._human_st_mode(entry["mode"]) if re.match("^(d|l)", data["mode"]): data["type"] = "d" data["folder"] = True else: data["type"] = "f" data["folder"] = False data["inodes"] = entry["nlink"] data["uid"] = entry["uid"] data["gid"] = entry["gid"] data["parent"] = dirn or top data["size"] = "{0:.1eM}".format(_hr(entry["size"])) data["date"] = entry["mtime"] data["fullname"] = ( os.path.join(top, entry["name"]) if top != "*" else entry["name"] ) data["level"] = level data["children"] = [] ret.append(data) return ret def get_tree(self, name=None, backup=None, root=None, level=-1, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.get_tree`""" if not name or not backup: return [] if not root: top = "" else: top = to_unicode(root) # we know this operation may take a while so we arbitrary increase the # read timeout timeout = None if top == "*": timeout = max(self.timeout, 300) query = self.status("c:{0}:b:{1}:p:{2}\n".format(name, backup, top), timeout) return self._format_tree(query, top, level) def get_client_version(self, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.get_client_version` """ return self.client_version def get_server_version(self, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.get_server_version` """ if not self.server_version: self.status() return self.server_version def get_client_labels(self, client=None, agent=None): """See :func:`burpui.misc.backend.interface.BUIbackend.get_client_labels` """ ret = [] if not client: return ret # micro optimization since the status results are cached in memory for a # couple seconds, using the same global query and iterating over it # will be more efficient than filtering burp-side query = self.status("c:\n") if not query: return ret try: for cli in query["clients"]: if cli["name"] == client: return cli["labels"] except KeyError: return ret # Same as in Burp1 backend # def restore_files( # self, # name=None, # backup=None, # files=None, # strip=None, # archive='zip', # password=None, # agent=None): # def read_conf_cli(self, agent=None): # def read_conf_srv(self, agent=None): # def store_conf_cli(self, data, agent=None): # def store_conf_srv(self, data, agent=None): # def get_parser_attr(self, attr=None, agent=None):<|fim▁end|>
:param number: Backup number to work on :type number: int
<|file_name|>24.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export { PhoneIp24 as default } from "../../";
<|file_name|>decodable.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The compiler code necessary for `#[derive(Decodable)]`. See encodable.rs for more. use ast; use ast::{MetaItem, Item, Expr, MutMutable}; use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; use parse::token; use ptr::P; pub fn expand_deriving_rustc_decodable<F>(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Item, push: F) where F: FnOnce(P<Item>), { expand_deriving_decodable_imp(cx, span, mitem, item, push, "rustc_serialize") } pub fn expand_deriving_decodable<F>(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Item, push: F) where F: FnOnce(P<Item>), { expand_deriving_decodable_imp(cx, span, mitem, item, push, "serialize") } fn expand_deriving_decodable_imp<F>(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Item, push: F, krate: &'static str) where F: FnOnce(P<Item>), { if !cx.use_std { // FIXME(#21880): lift this requirement. cx.span_err(span, "this trait cannot be derived with #![no_std]"); return; } let trait_def = TraitDef { span: span, attributes: Vec::new(), path: Path::new_(vec!(krate, "Decodable"), None, vec!(), true), additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), methods: vec!( MethodDef { name: "decode", generics: LifetimeBounds { lifetimes: Vec::new(), bounds: vec!(("__D", vec!(Path::new_( vec!(krate, "Decoder"), None, vec!(), true))))<|fim▁hole|> args: vec!(Ptr(box Literal(Path::new_local("__D")), Borrowed(None, MutMutable))), ret_ty: Literal(Path::new_( pathvec_std!(cx, core::result::Result), None, vec!(box Self, box Literal(Path::new_( vec!["__D", "Error"], None, vec![], false ))), true )), attributes: Vec::new(), combine_substructure: combine_substructure(box |a, b, c| { decodable_substructure(a, b, c, krate) }), } ), associated_types: Vec::new(), }; trait_def.expand(cx, mitem, item, push) } fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure, krate: &str) -> P<Expr> { let decoder = substr.nonself_args[0].clone(); let recurse = vec!(cx.ident_of(krate), cx.ident_of("Decodable"), cx.ident_of("decode")); let exprdecode = cx.expr_path(cx.path_global(trait_span, recurse)); // throw an underscore in front to suppress unused variable warnings let blkarg = cx.ident_of("_d"); let blkdecoder = cx.expr_ident(trait_span, blkarg); return match *substr.fields { StaticStruct(_, ref summary) => { let nfields = match *summary { Unnamed(ref fields) => fields.len(), Named(ref fields) => fields.len() }; let read_struct_field = cx.ident_of("read_struct_field"); let path = cx.path_ident(trait_span, substr.type_ident); let result = decode_static_fields(cx, trait_span, path, summary, |cx, span, name, field| { cx.expr_try(span, cx.expr_method_call(span, blkdecoder.clone(), read_struct_field, vec!(cx.expr_str(span, name), cx.expr_usize(span, field), exprdecode.clone()))) }); let result = cx.expr_ok(trait_span, result); cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"), vec!( 
cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.expr_usize(trait_span, nfields), cx.lambda_expr_1(trait_span, result, blkarg) )) } StaticEnum(_, ref fields) => { let variant = cx.ident_of("i"); let mut arms = Vec::new(); let mut variants = Vec::new(); let rvariant_arg = cx.ident_of("read_enum_variant_arg"); for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() { variants.push(cx.expr_str(v_span, token::get_ident(name))); let path = cx.path(trait_span, vec![substr.type_ident, name]); let decoded = decode_static_fields(cx, v_span, path, parts, |cx, span, _, field| { let idx = cx.expr_usize(span, field); cx.expr_try(span, cx.expr_method_call(span, blkdecoder.clone(), rvariant_arg, vec!(idx, exprdecode.clone()))) }); arms.push(cx.arm(v_span, vec!(cx.pat_lit(v_span, cx.expr_usize(v_span, i))), decoded)); } arms.push(cx.arm_unreachable(trait_span)); let result = cx.expr_ok(trait_span, cx.expr_match(trait_span, cx.expr_ident(trait_span, variant), arms)); let lambda = cx.lambda_expr(trait_span, vec!(blkarg, variant), result); let variant_vec = cx.expr_vec(trait_span, variants); let variant_vec = cx.expr_addr_of(trait_span, variant_vec); let result = cx.expr_method_call(trait_span, blkdecoder, cx.ident_of("read_enum_variant"), vec!(variant_vec, lambda)); cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"), vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.lambda_expr_1(trait_span, result, blkarg) )) } _ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)") }; } /// Create a decoder for a single enum variant/struct: /// - `outer_pat_path` is the path to this enum variant/struct /// - `getarg` should retrieve the `usize`-th field with name `@str`. fn decode_static_fields<F>(cx: &mut ExtCtxt, trait_span: Span, outer_pat_path: ast::Path, fields: &StaticFields, mut getarg: F) -> P<Expr> where F: FnMut(&mut ExtCtxt, Span, InternedString, usize) -> P<Expr>, { match *fields { Unnamed(ref fields) => { let path_expr = cx.expr_path(outer_pat_path); if fields.is_empty() { path_expr } else { let fields = fields.iter().enumerate().map(|(i, &span)| { getarg(cx, span, token::intern_and_get_ident(&format!("_field{}", i)[]), i) }).collect(); cx.expr_call(trait_span, path_expr, fields) } } Named(ref fields) => { // use the field's span to get nicer error messages. let fields = fields.iter().enumerate().map(|(i, &(name, span))| { let arg = getarg(cx, span, token::get_ident(name), i); cx.field_imm(span, name, arg) }).collect(); cx.expr_struct(trait_span, outer_pat_path, fields) } } }<|fim▁end|>
}, explicit_self: None,
<|file_name|>RelationPermissionsDao.java<|end_file_name|><|fim▁begin|>/** * The MIT License * * Copyright (C) 2015 Asterios Raptis * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND<|fim▁hole|> */ package de.alpharogroup.user.repositories; import org.springframework.stereotype.Repository; import de.alpharogroup.db.dao.jpa.JpaEntityManagerDao; import de.alpharogroup.user.entities.RelationPermissions; @Repository("relationPermissionsDao") public class RelationPermissionsDao extends JpaEntityManagerDao<RelationPermissions, Integer> { /** * The serialVersionUID. */ private static final long serialVersionUID = 1L; }<|fim▁end|>
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
<|file_name|>_gatebody.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2017 IBM RESEARCH. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= """ Node for an OPENQASM custom gate body. """ from ._node import Node class GateBody(Node): """Node for an OPENQASM custom gate body. children is a list of gate operation nodes. These are one of barrier, custom_unitary, U, or CX. """ def __init__(self, children): """Create the gatebody node.""" Node.__init__(self, 'gate_body', children, None) def qasm(self, prec=15): """Return the corresponding OPENQASM string.""" string = "" for children in self.children: string += " " + children.qasm(prec) + "\n" return string def calls(self): """Return a list of custom gate names in this gate body.""" lst = [] for children in self.children:<|fim▁hole|><|fim▁end|>
if children.type == "custom_unitary": lst.append(children.name) return lst
<|file_name|>sriov_nic_agent.py<|end_file_name|><|fim▁begin|># Copyright 2014 Mellanox Technologies, Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import socket import sys import time import eventlet eventlet.monkey_patch() from oslo_config import cfg from oslo_log import log as logging import oslo_messaging from oslo_service import loopingcall from neutron.agent.l2.extensions import manager as ext_manager from neutron.agent import rpc as agent_rpc from neutron.agent import securitygroups_rpc as sg_rpc from neutron.common import config as common_config from neutron.common import constants as n_constants from neutron.common import topics from neutron.common import utils as n_utils from neutron import context from neutron.i18n import _LE, _LI, _LW from neutron.plugins.ml2.drivers.mech_sriov.agent.common import config from neutron.plugins.ml2.drivers.mech_sriov.agent.common \ import exceptions as exc from neutron.plugins.ml2.drivers.mech_sriov.agent import eswitch_manager as esm LOG = logging.getLogger(__name__) class SriovNicSwitchRpcCallbacks(sg_rpc.SecurityGroupAgentRpcCallbackMixin): # Set RPC API version to 1.0 by default. # history # 1.1 Support Security Group RPC target = oslo_messaging.Target(version='1.1') def __init__(self, context, agent, sg_agent): super(SriovNicSwitchRpcCallbacks, self).__init__() self.context = context self.agent = agent self.sg_agent = sg_agent def port_update(self, context, **kwargs): LOG.debug("port_update received") port = kwargs.get('port') # Put the port mac address in the updated_devices set. # Do not store port details, as if they're used for processing # notifications there is no guarantee the notifications are # processed in the same order as the relevant API requests. 
mac = port['mac_address'] pci_slot = None if port.get('binding:profile'): pci_slot = port['binding:profile'].get('pci_slot') if pci_slot: self.agent.updated_devices.add((mac, pci_slot)) LOG.debug("port_update RPC received for port: %(id)s with MAC " "%(mac)s and PCI slot %(pci_slot)s slot", {'id': port['id'], 'mac': mac, 'pci_slot': pci_slot}) else: LOG.debug("No PCI Slot for port %(id)s with MAC %(mac)s; " "skipping", {'id': port['id'], 'mac': mac, 'pci_slot': pci_slot}) class SriovNicSwitchAgent(object): def __init__(self, physical_devices_mappings, exclude_devices, polling_interval): self.polling_interval = polling_interval self.conf = cfg.CONF self.setup_eswitch_mgr(physical_devices_mappings, exclude_devices) configurations = {'device_mappings': physical_devices_mappings} self.agent_state = { 'binary': 'neutron-sriov-nic-agent', 'host': self.conf.host, 'topic': n_constants.L2_AGENT_TOPIC, 'configurations': configurations, 'agent_type': n_constants.AGENT_TYPE_NIC_SWITCH, 'start_flag': True} # Stores port update notifications for processing in the main loop self.updated_devices = set() self.mac_to_port_id_mapping = {} self.context = context.get_admin_context_without_session() self.plugin_rpc = agent_rpc.PluginApi(topics.PLUGIN) self.sg_plugin_rpc = sg_rpc.SecurityGroupServerRpcApi(topics.PLUGIN) self.sg_agent = sg_rpc.SecurityGroupAgentRpc(self.context, self.sg_plugin_rpc) self._setup_rpc() self.ext_manager = self._create_agent_extension_manager( self.connection) # The initialization is complete; we can start receiving messages self.connection.consume_in_threads() # Initialize iteration counter self.iter_num = 0 def _setup_rpc(self): self.agent_id = 'nic-switch-agent.%s' % socket.gethostname() LOG.info(_LI("RPC agent_id: %s"), self.agent_id) self.topic = topics.AGENT self.state_rpc = agent_rpc.PluginReportStateAPI(topics.PLUGIN) # RPC network init # Handle updates from service self.endpoints = [SriovNicSwitchRpcCallbacks(self.context, self, self.sg_agent)] # Define the listening consumers for the agent consumers = [[topics.PORT, topics.UPDATE], [topics.NETWORK, topics.DELETE], [topics.SECURITY_GROUP, topics.UPDATE]] self.connection = agent_rpc.create_consumers(self.endpoints, self.topic, consumers, start_listening=False) report_interval = cfg.CONF.AGENT.report_interval if report_interval: heartbeat = loopingcall.FixedIntervalLoopingCall( self._report_state) heartbeat.start(interval=report_interval) def _report_state(self): try: devices = len(self.eswitch_mgr.get_assigned_devices_info()) self.agent_state.get('configurations')['devices'] = devices self.state_rpc.report_state(self.context, self.agent_state) self.agent_state.pop('start_flag', None) except Exception: LOG.exception(_LE("Failed reporting state!")) def _create_agent_extension_manager(self, connection): ext_manager.register_opts(self.conf) mgr = ext_manager.AgentExtensionsManager(self.conf) mgr.initialize(connection, 'sriov') return mgr def setup_eswitch_mgr(self, device_mappings, exclude_devices={}): self.eswitch_mgr = esm.ESwitchManager() self.eswitch_mgr.discover_devices(device_mappings, exclude_devices) def scan_devices(self, registered_devices, updated_devices): curr_devices = self.eswitch_mgr.get_assigned_devices_info() device_info = {} device_info['current'] = curr_devices device_info['added'] = curr_devices - registered_devices # we don't want to process updates for devices that don't exist device_info['updated'] = updated_devices & curr_devices # we need to clean up after devices are removed device_info['removed'] = 
registered_devices - curr_devices return device_info def _device_info_has_changes(self, device_info): return (device_info.get('added') or device_info.get('updated') or device_info.get('removed')) def process_network_devices(self, device_info): resync_a = False resync_b = False self.sg_agent.prepare_devices_filter(device_info.get('added')) if device_info.get('updated'): self.sg_agent.refresh_firewall() # Updated devices are processed the same as new ones, as their # admin_state_up may have changed. The set union prevents duplicating # work when a device is new and updated in the same polling iteration. devices_added_updated = (set(device_info.get('added')) | set(device_info.get('updated'))) if devices_added_updated: resync_a = self.treat_devices_added_updated(devices_added_updated) if device_info.get('removed'): resync_b = self.treat_devices_removed(device_info['removed']) # If one of the above operations fails => resync with plugin return (resync_a | resync_b) def treat_device(self, device, pci_slot, admin_state_up, spoofcheck=True): if self.eswitch_mgr.device_exists(device, pci_slot): try: self.eswitch_mgr.set_device_spoofcheck(device, pci_slot, spoofcheck) except Exception: LOG.warning(_LW("Failed to set spoofcheck for device %s"), device) LOG.info(_LI("Device %(device)s spoofcheck %(spoofcheck)s"), {"device": device, "spoofcheck": spoofcheck}) try: self.eswitch_mgr.set_device_state(device, pci_slot, admin_state_up) except exc.SriovNicError: LOG.exception(_LE("Failed to set device %s state"), device) return if admin_state_up: # update plugin about port status self.plugin_rpc.update_device_up(self.context, device, self.agent_id, cfg.CONF.host) else: self.plugin_rpc.update_device_down(self.context, device, self.agent_id, cfg.CONF.host) else: LOG.info(_LI("No device with MAC %s defined on agent."), device) def treat_devices_added_updated(self, devices_info): try: macs_list = set([device_info[0] for device_info in devices_info]) devices_details_list = self.plugin_rpc.get_devices_details_list( self.context, macs_list, self.agent_id) except Exception as e: LOG.debug("Unable to get port details for devices " "with MAC addresses %(devices)s: %(e)s", {'devices': macs_list, 'e': e}) # resync is needed return True for device_details in devices_details_list: device = device_details['device'] LOG.debug("Port with MAC address %s is added", device) if 'port_id' in device_details: LOG.info(_LI("Port %(device)s updated. 
Details: %(details)s"), {'device': device, 'details': device_details}) port_id = device_details['port_id'] self.mac_to_port_id_mapping[device] = port_id profile = device_details['profile'] spoofcheck = device_details.get('port_security_enabled', True) self.treat_device(device, profile.get('pci_slot'), device_details['admin_state_up'], spoofcheck) self.ext_manager.handle_port(self.context, device_details) else: LOG.info(_LI("Device with MAC %s not defined on plugin"), device) return False def treat_devices_removed(self, devices): resync = False for device in devices: mac, pci_slot = device LOG.info(_LI("Removing device with MAC address %(mac)s and " "PCI slot %(pci_slot)s"), {'mac': mac, 'pci_slot': pci_slot}) try: port_id = self.mac_to_port_id_mapping.get(mac) if port_id: profile = {'pci_slot': pci_slot}<|fim▁hole|> port = {'port_id': port_id, 'device': mac, 'profile': profile} self.ext_manager.delete_port(self.context, port) del self.mac_to_port_id_mapping[mac] else: LOG.warning(_LW("port_id to device with MAC " "%s not found"), mac) dev_details = self.plugin_rpc.update_device_down(self.context, mac, self.agent_id, cfg.CONF.host) except Exception as e: LOG.debug("Removing port failed for device with MAC address " "%(mac)s and PCI slot %(pci_slot)s due to %(exc)s", {'mac': mac, 'pci_slot': pci_slot, 'exc': e}) resync = True continue if dev_details['exists']: LOG.info(_LI("Port with MAC %(mac)s and PCI slot " "%(pci_slot)s updated."), {'mac': mac, 'pci_slot': pci_slot}) else: LOG.debug("Device with MAC %(mac)s and PCI slot " "%(pci_slot)s not defined on plugin", {'mac': mac, 'pci_slot': pci_slot}) return resync def daemon_loop(self): sync = True devices = set() LOG.info(_LI("SRIOV NIC Agent RPC Daemon Started!")) while True: start = time.time() LOG.debug("Agent rpc_loop - iteration:%d started", self.iter_num) if sync: LOG.info(_LI("Agent out of sync with plugin!")) devices.clear() sync = False device_info = {} # Save updated devices dict to perform rollback in case # resync would be needed, and then clear self.updated_devices. # As the greenthread should not yield between these # two statements, this will should be thread-safe. updated_devices_copy = self.updated_devices self.updated_devices = set() try: device_info = self.scan_devices(devices, updated_devices_copy) if self._device_info_has_changes(device_info): LOG.debug("Agent loop found changes! %s", device_info) # If treat devices fails - indicates must resync with # plugin sync = self.process_network_devices(device_info) devices = device_info['current'] except Exception: LOG.exception(_LE("Error in agent loop. Devices info: %s"), device_info) sync = True # Restore devices that were removed from this set earlier # without overwriting ones that may have arrived since. self.updated_devices |= updated_devices_copy # sleep till end of polling interval elapsed = (time.time() - start) if (elapsed < self.polling_interval): time.sleep(self.polling_interval - elapsed) else: LOG.debug("Loop iteration exceeded interval " "(%(polling_interval)s vs. %(elapsed)s)!", {'polling_interval': self.polling_interval, 'elapsed': elapsed}) self.iter_num = self.iter_num + 1 class SriovNicAgentConfigParser(object): def __init__(self): self.device_mappings = {} self.exclude_devices = {} def parse(self): """Parses device_mappings and exclude_devices. 
Parse and validate the consistency in both mappings """ self.device_mappings = n_utils.parse_mappings( cfg.CONF.SRIOV_NIC.physical_device_mappings) self.exclude_devices = config.parse_exclude_devices( cfg.CONF.SRIOV_NIC.exclude_devices) self._validate() def _validate(self): """Validate configuration. Validate that network_device in excluded_device exists in device mappings """ dev_net_set = set(self.device_mappings.values()) for dev_name in self.exclude_devices.keys(): if dev_name not in dev_net_set: raise ValueError(_("Device name %(dev_name)s is missing from " "physical_device_mappings") % {'dev_name': dev_name}) def main(): common_config.init(sys.argv[1:]) common_config.setup_logging() try: config_parser = SriovNicAgentConfigParser() config_parser.parse() device_mappings = config_parser.device_mappings exclude_devices = config_parser.exclude_devices except ValueError: LOG.exception(_LE("Failed on Agent configuration parse. " "Agent terminated!")) raise SystemExit(1) LOG.info(_LI("Physical Devices mappings: %s"), device_mappings) LOG.info(_LI("Exclude Devices: %s"), exclude_devices) polling_interval = cfg.CONF.AGENT.polling_interval try: agent = SriovNicSwitchAgent(device_mappings, exclude_devices, polling_interval) except exc.SriovNicError: LOG.exception(_LE("Agent Initialization Failed")) raise SystemExit(1) # Start everything. LOG.info(_LI("Agent initialized successfully, now running... ")) agent.daemon_loop() if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>gobspy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
from gobspy import main
<|fim▁hole|><|fim▁end|>
main()
<|file_name|>transient_detector_unittest.cc<|end_file_name|><|fim▁begin|>/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license<|fim▁hole|>
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_processing/transient/transient_detector.h"

#include <sstream>
#include <string>

#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_processing/transient/common.h"
#include "webrtc/modules/audio_processing/transient/file_utils.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/typedefs.h"

namespace webrtc {

static const int kSampleRatesHz[] = {ts::kSampleRate8kHz,
                                     ts::kSampleRate16kHz,
                                     ts::kSampleRate32kHz,
                                     ts::kSampleRate48kHz};
static const size_t kNumberOfSampleRates =
    sizeof(kSampleRatesHz) / sizeof(*kSampleRatesHz);

// This test is for the correctness of the transient detector.
// Checks the results comparing them with the ones stored in the detect files in
// the directory: resources/audio_processing/transient/
// The files contain all the results in double precision (Little endian).
// The audio files used with different sample rates are stored in the same
// directory.
TEST(TransientDetectorTest, DISABLED_ON_IOS(CorrectnessBasedOnFiles)) {
  for (size_t i = 0; i < kNumberOfSampleRates; ++i) {
    int sample_rate_hz = kSampleRatesHz[i];

    // Prepare detect file.
    std::stringstream detect_file_name;
    detect_file_name << "audio_processing/transient/detect"
                     << (sample_rate_hz / 1000) << "kHz";

    rtc::scoped_ptr<FileWrapper> detect_file(FileWrapper::Create());

    detect_file->OpenFile(
        test::ResourcePath(detect_file_name.str(), "dat").c_str(),
        true,    // Read only.
        false,   // No loop.
        false);  // No text.

    bool file_opened = detect_file->Open();
    ASSERT_TRUE(file_opened) << "File could not be opened.\n"
                             << detect_file_name.str().c_str();

    // Prepare audio file.
    std::stringstream audio_file_name;
    audio_file_name << "audio_processing/transient/audio"
                    << (sample_rate_hz / 1000) << "kHz";

    rtc::scoped_ptr<FileWrapper> audio_file(FileWrapper::Create());

    audio_file->OpenFile(
        test::ResourcePath(audio_file_name.str(), "pcm").c_str(),
        true,    // Read only.
        false,   // No loop.
        false);  // No text.

    // Create detector.
    TransientDetector detector(sample_rate_hz);

    const size_t buffer_length = sample_rate_hz * ts::kChunkSizeMs / 1000;
    rtc::scoped_ptr<float[]> buffer(new float[buffer_length]);

    const float kTolerance = 0.02f;

    size_t frames_read = 0;

    while (ReadInt16FromFileToFloatBuffer(audio_file.get(),
                                          buffer_length,
                                          buffer.get()) == buffer_length) {
      ++frames_read;

      float detector_value =
          detector.Detect(buffer.get(), buffer_length, NULL, 0);
      double file_value;
      ASSERT_EQ(1u, ReadDoubleBufferFromFile(detect_file.get(), 1, &file_value))
          << "Detect test file is malformed.\n";

      // Compare results with data from the matlab test file.
      EXPECT_NEAR(file_value, detector_value, kTolerance)
          << "Frame: " << frames_read;
    }

    detect_file->CloseFile();
    audio_file->CloseFile();
  }
}

}  // namespace webrtc<|fim▁end|>
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
<|file_name|>fablib.py<|end_file_name|><|fim▁begin|>import os from os import path from datetime import datetime import getpass import re import time from fabric.context_managers import cd, hide, settings from fabric.operations import require, prompt, get, run, sudo, local from fabric.state import env from fabric.contrib import files from fabric import utils def _setup_paths(project_settings): # first merge in variables from project_settings - but ignore __doc__ etc user_settings = [x for x in vars(project_settings).keys() if not x.startswith('__')] for setting in user_settings: env[setting] = vars(project_settings)[setting] # allow for project_settings having set up some of these differently env.setdefault('verbose', False) env.setdefault('use_sudo', True) env.setdefault('cvs_rsh', 'CVS_RSH="ssh"') env.setdefault('default_branch', {'production': 'master', 'staging': 'master'}) env.setdefault('server_project_home', path.join(env.server_home, env.project_name)) # TODO: change dev -> current env.setdefault('vcs_root_dir', path.join(env.server_project_home, 'dev')) env.setdefault('prev_root', path.join(env.server_project_home, 'previous')) env.setdefault('next_dir', path.join(env.server_project_home, 'next')) env.setdefault('dump_dir', path.join(env.server_project_home, 'dbdumps')) env.setdefault('deploy_dir', path.join(env.vcs_root_dir, 'deploy')) env.setdefault('settings', '%(project_name)s.settings' % env) if env.project_type == "django": env.setdefault('relative_django_dir', env.project_name) env.setdefault('relative_django_settings_dir', env['relative_django_dir']) env.setdefault('relative_ve_dir', path.join(env['relative_django_dir'], '.ve')) # now create the absolute paths of everything else env.setdefault('django_dir', path.join(env['vcs_root_dir'], env['relative_django_dir'])) env.setdefault('django_settings_dir', path.join(env['vcs_root_dir'], env['relative_django_settings_dir'])) env.setdefault('ve_dir', path.join(env['vcs_root_dir'], env['relative_ve_dir'])) env.setdefault('manage_py', path.join(env['django_dir'], 'manage.py')) # local_tasks_bin is the local copy of tasks.py # this should be the copy from where ever fab.py is being run from ... if 'DEPLOYDIR' in os.environ: env.setdefault('local_tasks_bin', path.join(os.environ['DEPLOYDIR'], 'tasks.py')) else: env.setdefault('local_tasks_bin', path.join(path.dirname(__file__), 'tasks.py')) # valid environments - used for require statements in fablib env.valid_envs = env.host_list.keys() def _linux_type(): if 'linux_type' not in env: # work out if we're based on redhat or centos # TODO: look up stackoverflow question about this. if files.exists('/etc/redhat-release'): env.linux_type = 'redhat' elif files.exists('/etc/debian_version'): env.linux_type = 'debian' else: # TODO: should we print a warning here? 
utils.abort("could not determine linux type of server we're deploying to") return env.linux_type def _get_python(): if 'python_bin' not in env: python26 = path.join('/', 'usr', 'bin', 'python2.6') if files.exists(python26): env.python_bin = python26 else: env.python_bin = path.join('/', 'usr', 'bin', 'python') return env.python_bin def _get_tasks_bin(): if 'tasks_bin' not in env: env.tasks_bin = path.join(env.deploy_dir, 'tasks.py') return env.tasks_bin def _tasks(tasks_args, verbose=False): tasks_cmd = _get_tasks_bin() if env.verbose or verbose: tasks_cmd += ' -v' sudo_or_run(tasks_cmd + ' ' + tasks_args) def _get_svn_user_and_pass(): if 'svnuser' not in env or len(env.svnuser) == 0: # prompt user for username prompt('Enter SVN username:', 'svnuser') if 'svnpass' not in env or len(env.svnpass) == 0: # prompt user for password env.svnpass = getpass.getpass('Enter SVN password:') def verbose(verbose=True): """Set verbose output""" env.verbose = verbose def deploy_clean(revision=None): """ delete the entire install and do a clean install """ if env.environment == 'production': utils.abort('do not delete the production environment!!!') require('server_project_home', provided_by=env.valid_envs) # TODO: dump before cleaning database? with settings(warn_only=True): webserver_cmd('stop') clean_db() clean_files() deploy(revision) def clean_files(): sudo_or_run('rm -rf %s' % env.server_project_home) def _create_dir_if_not_exists(path): if not files.exists(path): sudo_or_run('mkdir -p %s' % path) def deploy(revision=None, keep=None): """ update remote host environment (virtualenv, deploy, update) It takes two arguments: * revision is the VCS revision ID to checkout (if not specified then the latest will be checked out) * keep is the number of old versions to keep around for rollback (default 5)""" require('server_project_home', provided_by=env.valid_envs) check_for_local_changes() _create_dir_if_not_exists(env.server_project_home) # TODO: check if our live site is in <sitename>/dev/ - if so # move it to <sitename>/current/ and make a link called dev/ to # the current/ directory # TODO: if dev/ is found to be a link, ask the user if the apache config # has been updated to point at current/ - and if so then delete dev/ # _migrate_from_dev_to_current() create_copy_for_next() checkout_or_update(in_next=True, revision=revision) # remove any old pyc files - essential if the .py file has been removed if env.project_type == "django": rm_pyc_files(path.join(env.next_dir, env.relative_django_dir)) # create the deploy virtualenv if we use it create_deploy_virtualenv(in_next=True) # we only have to disable this site after creating the rollback copy # (do this so that apache carries on serving other sites on this server # and the maintenance page for this vhost) downtime_start = datetime.now() link_webserver_conf(maintenance=True) with settings(warn_only=True): webserver_cmd('reload') next_to_current_to_rollback() # Use tasks.py deploy:env to actually do the deployment, including # creating the virtualenv if it thinks it necessary, ignoring # env.use_virtualenv as tasks.py knows nothing about it. 
_tasks('deploy:' + env.environment) # bring this vhost back in, reload the webserver and touch the WSGI # handler (which reloads the wsgi app) link_webserver_conf() webserver_cmd('reload') downtime_end = datetime.now() touch_wsgi() delete_old_rollback_versions(keep) if env.environment == 'production': setup_db_dumps() _report_downtime(downtime_start, downtime_end) def _report_downtime(downtime_start, downtime_end): downtime = downtime_end - downtime_start utils.puts("Downtime lasted for %.1f seconds" % downtime.total_seconds()) utils.puts("(Downtime started at %s and finished at %s)" % (downtime_start, downtime_end)) def set_up_celery_daemon(): require('vcs_root_dir', 'project_name', provided_by=env) for command in ('celerybeat', 'celeryd'): command_project = command + '_' + env.project_name celery_run_script_location = path.join(env['vcs_root_dir'], 'celery', 'init', command) celery_run_script = path.join('/etc', 'init.d', command_project) celery_configuration_location = path.join(env['vcs_root_dir'], 'celery', 'config', command) celery_configuration_destination = path.join('/etc', 'default', command_project) sudo_or_run(" ".join(['cp', celery_run_script_location, celery_run_script])) sudo_or_run(" ".join(['chmod', '+x', celery_run_script])) sudo_or_run(" ".join(['cp', celery_configuration_location, celery_configuration_destination])) sudo_or_run('/etc/init.d/%s restart' % command_project) def clean_old_celery(): """As the scripts have moved location you might need to get rid of old versions of celery.""" require('vcs_root_dir', provided_by=env) for command in ('celerybeat', 'celeryd'): celery_run_script = path.join('/etc', 'init.d', command) if files.exists(celery_run_script): sudo_or_run('/etc/init.d/%s stop' % command) sudo_or_run('rm %s' % celery_run_script) celery_configuration_destination = path.join('/etc', 'default', command) if files.exists(celery_configuration_destination): sudo_or_run('rm %s' % celery_configuration_destination) def create_copy_for_next(): """Copy the current version to "next" so that we can do stuff like the VCS update and virtualenv update without taking the site offline""" # TODO: check if next directory already exists # if it does maybe there was an aborted deploy, or maybe someone else is # deploying. Either way, stop and ask the user what to do. if files.exists(env.next_dir): utils.warn('The "next" directory already exists. Maybe a previous ' 'deploy failed, or maybe another deploy is in progress.') continue_anyway = prompt('Would you like to continue anyway ' '(and delete the current next dir)? [no/yes]', default='no', validate='^no|yes$') if continue_anyway.lower() != 'yes': utils.abort("Aborting deploy - try again when you're certain what to do.") sudo_or_run('rm -rf %s' % env.next_dir) # if this is the initial deploy, the vcs_root_dir won't exist yet. In that # case, don't create it (otherwise the checkout code will get confused). if files.exists(env.vcs_root_dir): # cp -a - amongst other things this preserves links and timestamps # so the compare that bootstrap.py does to see if the virtualenv # needs an update should still work. sudo_or_run('cp -a %s %s' % (env.vcs_root_dir, env.next_dir)) def next_to_current_to_rollback(): """Move the current version to the previous directory (so we can roll back to it, move the next version to the current version (so it will be used) and do a db dump in the rollback directory.""" # create directory for it # if this is the initial deploy, the vcs_root_dir won't exist yet. 
In that # case just skip the rollback version. if files.exists(env.vcs_root_dir): _create_dir_if_not_exists(env.prev_root) prev_dir = path.join(env.prev_root, time.strftime("%Y-%m-%d_%H-%M-%S")) sudo_or_run('mv %s %s' % (env.vcs_root_dir, prev_dir)) _dump_db_in_previous_directory(prev_dir) sudo_or_run('mv %s %s' % (env.next_dir, env.vcs_root_dir)) def create_copy_for_rollback(): """Move the current version to the previous directory (so we can roll back to it, move the next version to the current version (so it will be used) and do a db dump in the rollback directory.""" # create directory for it prev_dir = path.join(env.prev_root, time.strftime("%Y-%m-%d_%H-%M-%S")) _create_dir_if_not_exists(prev_dir) # cp -a sudo_or_run('cp %s %s' % (env.vcs_root_dir, prev_dir)) _dump_db_in_previous_directory(prev_dir) def _dump_db_in_previous_directory(prev_dir): require('django_settings_dir', provided_by=env.valid_envs) if (env.project_type == 'django' and files.exists(path.join(env.django_settings_dir, 'local_settings.py'))): # dump database (provided local_settings has been set up properly) with cd(prev_dir): # just in case there is some other reason why the dump fails with settings(warn_only=True): _tasks('dump_db') def delete_old_rollback_versions(keep=None): """Delete old rollback directories, keeping the last "keep" (default 5)".""" require('prev_root', provided_by=env.valid_envs) # the -1 argument ensures one directory per line prev_versions = run('ls -1 ' + env.prev_root).split('\n') if keep is None: if 'versions_to_keep' in env: keep = env.versions_to_keep else: keep = 5 else: keep = int(keep) if keep == 0: return versions_to_keep = -1 * int(keep) prev_versions_to_delete = prev_versions[:versions_to_keep] for version_to_delete in prev_versions_to_delete: sudo_or_run('rm -rf ' + path.join( env.prev_root, version_to_delete.strip())) def list_previous(): """List the previous versions available to rollback to.""" # could also determine the VCS revision number require('prev_root', provided_by=env.valid_envs) run('ls ' + env.prev_root) def rollback(version='last', migrate=False, restore_db=False): """Redeploy one of the old versions. Arguments are 'version', 'migrate' and 'restore_db': * if version is 'last' (the default) then the most recent version will be restored. Otherwise specify by timestamp - use list_previous to get a list of available versions. * if restore_db is True, then the database will be restored as well as the code. The default is False. * if migrate is True, then fabric will attempt to work out the new and old migration status and run the migrations to match the database versions. The default is False Note that migrate and restore_db cannot both be True.""" require('prev_root', 'vcs_root_dir', provided_by=env.valid_envs) if migrate and restore_db: utils.abort('rollback cannot do both migrate and restore_db') if migrate: utils.abort("rollback: haven't worked out how to do migrate yet ...") if version == 'last': # get the latest directory from prev_dir # list directories in env.prev_root, use last one version = run('ls ' + env.prev_root).split('\n')[-1] # check version specified exists rollback_dir = path.join(env.prev_root, version) if not files.exists(rollback_dir): utils.abort("Cannot rollback to version %s, it does not exist, use list_previous to see versions available" % version) webserver_cmd("stop") # first copy this version out of the way create_copy_for_rollback() if migrate: # run the south migrations back to the old version # but how to work out what the old version is?? 
pass if restore_db: # feed the dump file into mysql command with cd(rollback_dir): _tasks('load_dbdump') # delete everything - don't want stray files left over sudo_or_run('rm -rf %s' % env.vcs_root_dir) # cp -a from rollback_dir to vcs_root_dir sudo_or_run('cp -a %s %s' % (rollback_dir, env.vcs_root_dir)) webserver_cmd("start") def local_test(): """ run the django tests on the local machine """ require('project_name') with cd(path.join("..", env.project_name)): local("python " + env.test_cmd, capture=False) def remote_test(): """ run the django tests remotely - staging only """ require('django_dir', provided_by=env.valid_envs) if env.environment == 'production': utils.abort('do not run tests on the production environment') with cd(env.django_dir): sudo_or_run(_get_python() + env.test_cmd) def version(): """ return the deployed VCS revision and commit comments""" require('server_project_home', 'repo_type', 'vcs_root_dir', 'repository', provided_by=env.valid_envs) if env.repo_type == "git": with cd(env.vcs_root_dir): sudo_or_run('git log | head -5') elif env.repo_type == "svn": _get_svn_user_and_pass() with cd(env.vcs_root_dir): with hide('running'): cmd = 'svn log --non-interactive --username %s --password %s | head -4' % (env.svnuser, env.svnpass) sudo_or_run(cmd) else: utils.abort('Unsupported repo type: %s' % (env.repo_type)) def _check_git_branch(): env.revision = None with cd(env.vcs_root_dir): with settings(warn_only=True): # get branch information server_branch = sudo_or_run('git rev-parse --abbrev-ref HEAD') server_commit = sudo_or_run('git rev-parse HEAD') local_branch = local('git rev-parse --abbrev-ref HEAD', capture=True) default_branch = env.default_branch.get(env.environment, 'master') git_branch_r = sudo_or_run('git branch --color=never -r') git_branch_r = git_branch_r.split('\n') branches = [b.split('/')[-1].strip() for b in git_branch_r if 'HEAD' not in b] # if all branches are the same, just stick to this branch if server_branch == local_branch == default_branch: env.revision = server_branch else: if server_branch == 'HEAD': # not on a branch - just print a warning print 'The server git repository is not on a branch' print 'Branch mismatch found:' print '* %s is the default branch for this server' % default_branch if server_branch == 'HEAD': print '* %s is the commit checked out on the server.' % server_commit else: print '* %s is the branch currently checked out on the server' % server_branch print '* %s is the current branch of your local git repo' % local_branch print '' print 'Available branches are:' for branch in branches: print '* %s' % branch print '' escaped_branches = [re.escape(b) for b in branches] validate_branch = '^' + '|'.join(escaped_branches) + '$' env.revision = prompt('Which branch would you like to use on the server? (or hit Ctrl-C to exit)', default=default_branch, validate=validate_branch) def check_for_local_changes(): """ check if there are local changes on the remote server """ require('repo_type', 'vcs_root_dir', provided_by=env.valid_envs) status_cmd = { 'svn': 'svn status --quiet', 'git': 'git status --short', 'cvs': '#not worked out yet' } if env.repo_type == 'cvs': print "TODO: write CVS status command" return if files.exists(path.join(env.vcs_root_dir, "." + env.repo_type)): with cd(env.vcs_root_dir): status = sudo_or_run(status_cmd[env.repo_type]) if status: print 'Found local changes on %s server' % env.environment print status cont = prompt('Would you like to continue with deployment? 
(yes/no)', default='no', validate=r'^yes|no$') if cont == 'no': utils.abort('Aborting deployment') if env.repo_type == 'git': _check_git_branch() def checkout_or_update(in_next=False, revision=None): """ checkout or update the project from version control. This command works with svn, git and cvs repositories. You can also specify a revision to checkout, as an argument.""" require('server_project_home', 'repo_type', 'vcs_root_dir', 'repository', provided_by=env.valid_envs) checkout_fn = { 'cvs': _checkout_or_update_cvs, 'svn': _checkout_or_update_svn, 'git': _checkout_or_update_git, } if in_next: vcs_root_dir = env.next_dir else: vcs_root_dir = env.vcs_root_dir if env.repo_type.lower() in checkout_fn: checkout_fn[env.repo_type](vcs_root_dir, revision) else: utils.abort('Unsupported VCS: %s' % env.repo_type.lower()) def _checkout_or_update_svn(vcs_root_dir, revision=None): # function to ask for svnuser and svnpass _get_svn_user_and_pass() # if the .svn directory exists, do an update, otherwise do # a checkout cmd = 'svn %s --non-interactive --no-auth-cache --username %s --password %s' if files.exists(path.join(vcs_root_dir, ".svn")): cmd = cmd % ('update', env.svnuser, env.svnpass) if revision: cmd += " --revision " + revision with cd(vcs_root_dir): with hide('running'): sudo_or_run(cmd) else: cmd = cmd + " %s %s" cmd = cmd % ('checkout', env.svnuser, env.svnpass, env.repository, vcs_root_dir) if revision: cmd += "@" + revision with cd(env.server_project_home): with hide('running'): sudo_or_run(cmd) def _checkout_or_update_git(vcs_root_dir, revision=None): # if the .git directory exists, do an update, otherwise do # a clone if files.exists(path.join(vcs_root_dir, ".git")): with cd(vcs_root_dir): sudo_or_run('git remote rm origin') sudo_or_run('git remote add origin %s' % env.repository) # fetch now, merge later (if on branch) sudo_or_run('git fetch origin') if revision is None: revision = env.revision with cd(vcs_root_dir): stash_result = sudo_or_run('git stash') sudo_or_run('git checkout %s' % revision) # check if revision is a branch, and do a merge if it is with settings(warn_only=True): rev_is_branch = sudo_or_run('git branch -r | grep %s' % revision) # use old fabric style here to support Ubuntu 10.04 if not rev_is_branch.failed: sudo_or_run('git merge origin/%s' % revision) # if we did a stash, now undo it if not stash_result.startswith("No local changes"): sudo_or_run('git stash pop') else: with cd(env.server_project_home): default_branch = env.default_branch.get(env.environment, 'master') sudo_or_run('git clone -b %s %s %s' % (default_branch, env.repository, vcs_root_dir)) if files.exists(path.join(vcs_root_dir, ".gitmodules")): with cd(vcs_root_dir): sudo_or_run('git submodule update --init') def _checkout_or_update_cvs(vcs_root_dir, revision=None): if files.exists(vcs_root_dir): with cd(vcs_root_dir): sudo_or_run('CVS_RSH="ssh" cvs update -d -P') else: if 'cvs_user' in env: user_spec = env.cvs_user + "@" else: user_spec = "" with cd(env.server_project_home): cvs_options = '-d:%s:%s%s:%s' % (env.cvs_connection_type, user_spec, env.repository, env.repo_path) command_options = '-d %s' % vcs_root_dir if revision is not None: command_options += ' -r ' + revision sudo_or_run('%s cvs %s checkout %s %s' % (env.cvs_rsh, cvs_options, command_options, env.cvs_project)) def sudo_or_run(command): if env.use_sudo: return sudo(command) else: return run(command) def create_deploy_virtualenv(in_next=False): """ if using new style dye stuff, create the virtualenv to hold dye """ 
require('deploy_dir', provided_by=env.valid_envs) if in_next: # TODO: use relative_deploy_dir bootstrap_path = path.join(env.next_dir, 'deploy', 'bootstrap.py') else: bootstrap_path = path.join(env.deploy_dir, 'bootstrap.py') sudo_or_run('%s %s --full-rebuild --quiet' % (_get_python(), bootstrap_path)) def update_requirements(): """ update external dependencies on remote host """ _tasks('update_ve') def collect_static_files(): """ coolect static files in the 'static' directory """ sudo(_get_tasks_bin() + ' collect_static') def clean_db(revision=None): """ delete the entire database """ if env.environment == 'production': utils.abort('do not delete the production database!!!') _tasks("clean_db") def get_remote_dump(filename='/tmp/db_dump.sql', local_filename='./db_dump.sql', rsync=True): """ do a remote database dump and copy it to the local filesystem """ # future enhancement, do a mysqldump --skip-extended-insert (one insert # per line) and then do rsync rather than get() - less data transferred on # however rsync might need ssh keys etc require('user', 'host', provided_by=env.valid_envs) if rsync: _tasks('dump_db:' + filename + ',for_rsync=true') local("rsync -vz -e 'ssh -p %s' %s@%s:%s %s" % (env.port, env.user, env.host, filename, local_filename)) else: _tasks('dump_db:' + filename) get(filename, local_path=local_filename) sudo_or_run('rm ' + filename) def get_remote_dump_and_load(filename='/tmp/db_dump.sql', local_filename='./db_dump.sql', keep_dump=True, rsync=True): """ do a remote database dump, copy it to the local filesystem and then load it into the local database """ get_remote_dump(filename=filename, local_filename=local_filename, rsync=rsync) local(env.local_tasks_bin + ' restore_db:' + local_filename) if not keep_dump: local('rm ' + local_filename) def update_db(force_use_migrations=False): """ create and/or update the database, do migrations etc """ _tasks('update_db:force_use_migrations=%s' % force_use_migrations) def setup_db_dumps(): """ set up mysql database dumps """ require('dump_dir', provided_by=env.valid_envs) _tasks('setup_db_dumps:' + env.dump_dir) def touch_wsgi(): """ touch wsgi file to trigger reload """ require('vcs_root_dir', provided_by=env.valid_envs) wsgi_dir = path.join(env.vcs_root_dir, 'wsgi') sudo_or_run('touch ' + path.join(wsgi_dir, 'wsgi_handler.py')) def rm_pyc_files(py_dir=None): """Remove all the old pyc files to prevent stale files being used""" require('django_dir', provided_by=env.valid_envs) if py_dir is None: py_dir = env.django_dir with settings(warn_only=True): with cd(py_dir): sudo_or_run('find . 
-name \*.pyc | xargs rm') def _delete_file(path): if files.exists(path): sudo_or_run('rm %s' % path) def _link_files(source_file, target_path): if not files.exists(target_path): sudo_or_run('ln -s %s %s' % (source_file, target_path)) def link_webserver_conf(maintenance=False): """link the webserver conf file""" require('vcs_root_dir', provided_by=env.valid_envs) if env.webserver is None: return vcs_config_stub = path.join(env.vcs_root_dir, env.webserver, env.environment) vcs_config_live = vcs_config_stub + '.conf' vcs_config_maintenance = vcs_config_stub + '-maintenance.conf' webserver_conf = _webserver_conf_path() if maintenance: _delete_file(webserver_conf) if not files.exists(vcs_config_maintenance): return _link_files(vcs_config_maintenance, webserver_conf) else: if not files.exists(vcs_config_live): utils.abort('No %s conf file found - expected %s' % (env.webserver, vcs_config_live)) _delete_file(webserver_conf) _link_files(vcs_config_live, webserver_conf) # debian has sites-available/sites-enabled split with links if _linux_type() == 'debian': webserver_conf_enabled = webserver_conf.replace('available', 'enabled') sudo_or_run('ln -s %s %s' % (webserver_conf, webserver_conf_enabled)) webserver_configtest() def _webserver_conf_path(): webserver_conf_dir = { 'apache_redhat': '/etc/httpd/conf.d', 'apache_debian': '/etc/apache2/sites-available', } key = env.webserver + '_' + _linux_type() if key in webserver_conf_dir: return path.join(webserver_conf_dir[key], '%s_%s.conf' % (env.project_name, env.environment)) else: utils.abort('webserver %s is not supported (linux type %s)' % (env.webserver, _linux_type())) def webserver_configtest(): """ test webserver configuration """ tests = { 'apache_redhat': '/usr/sbin/httpd -S', 'apache_debian': '/usr/sbin/apache2ctl -S', } if env.webserver: key = env.webserver + '_' + _linux_type() if key in tests: sudo(tests[key]) else: utils.abort('webserver %s is not supported (linux type %s)' % (env.webserver, _linux_type())) def webserver_reload(): """ reload webserver on remote host """ webserver_cmd('reload') def webserver_restart(): """ restart webserver on remote host """<|fim▁hole|> def webserver_cmd(cmd): """ run cmd against webserver init.d script """ cmd_strings = { 'apache_redhat': '/etc/init.d/httpd', 'apache_debian': '/etc/init.d/apache2', } if env.webserver: key = env.webserver + '_' + _linux_type() if key in cmd_strings: sudo(cmd_strings[key] + ' ' + cmd) else: utils.abort('webserver %s is not supported' % env.webserver)<|fim▁end|>
webserver_cmd('restart')
<|file_name|>Toolbar.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { StandardProps } from '..';

export interface ToolbarProps extends StandardProps<React.HTMLAttributes<HTMLDivElement>, ToolbarClassKey> {
  disableGutters?: boolean;
}

export type ToolbarClassKey = 'root' | 'gutters';
<|fim▁hole|>
declare const Toolbar: React.ComponentType<ToolbarProps>;
export default Toolbar;
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from solution import Solution
from solution import TreeNode

def constructOne(s):
    if s == '#':
        return None
    else:
        return TreeNode(int(s))

def createTree(tree):
    q = []
    root = constructOne(tree[0]);
    q.append(root);
    idx = 1;<|fim▁hole|>
        if idx == len(tree):
            break
        left = constructOne(tree[idx])
        tn.left = left
        q.append(left)
        idx += 1
        if idx == len(tree):
            break
        right = constructOne(tree[idx])
        idx += 1
        tn.right = right
        q.append(right)
    return root

# inpt = createTree(['1', '#', '2', '3'])
inpt = createTree(['1', '2', '3', '#' , '#', '4', '#', '#', '5'])
sol = Solution()
res = sol.inorderTraversal(inpt)
print(res)<|fim▁end|>
    while q:
        tn = q.pop(0)
        if not tn:
            continue
<|file_name|>mac_arabic.py<|end_file_name|><|fim▁begin|>""" Python Character Mapping Codec generated from 'VENDORS/APPLE/ARABIC.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_map)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='mac-arabic', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Map decoding_map = codecs.make_identity_dict(range(256)) decoding_map.update({ 0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS 0x0081: 0x00a0, # NO-BREAK SPACE, right-left 0x0082: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA 0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE 0x0084: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE 0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS 0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS 0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE 0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE 0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX 0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS 0x008b: 0x06ba, # ARABIC LETTER NOON GHUNNA 0x008c: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left 0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA 0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE 0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE 0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX 0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS 0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE 0x0093: 0x2026, # HORIZONTAL ELLIPSIS, right-left 0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX 0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS 0x0096: 0x00f1, # LATIN SMALL LETTER N WITH TILDE 0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE 0x0098: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left 0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX 0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS 0x009b: 0x00f7, # DIVISION SIGN, right-left 0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE 0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE 0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX 0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS 0x00a0: 0x0020, # SPACE, right-left 0x00a1: 0x0021, # EXCLAMATION MARK, right-left 0x00a2: 0x0022, # QUOTATION MARK, right-left 0x00a3: 0x0023, # NUMBER SIGN, right-left 0x00a4: 0x0024, # DOLLAR SIGN, right-left 0x00a5: 0x066a, # ARABIC PERCENT SIGN 0x00a6: 0x0026, # AMPERSAND, right-left 0x00a7: 0x0027, # APOSTROPHE, right-left 0x00a8: 0x0028, # LEFT PARENTHESIS, right-left 0x00a9: 0x0029, # RIGHT PARENTHESIS, right-left 0x00aa: 0x002a, # ASTERISK, right-left 0x00ab: 0x002b, # PLUS SIGN, right-left 0x00ac: 0x060c, # ARABIC COMMA 0x00ad: 0x002d, # HYPHEN-MINUS, right-left 0x00ae: 0x002e, # FULL STOP, right-left 0x00af: 0x002f, # SOLIDUS, right-left 0x00b0: 
0x0660, # ARABIC-INDIC DIGIT ZERO, right-left (need override) 0x00b1: 0x0661, # ARABIC-INDIC DIGIT ONE, right-left (need override) 0x00b2: 0x0662, # ARABIC-INDIC DIGIT TWO, right-left (need override) 0x00b3: 0x0663, # ARABIC-INDIC DIGIT THREE, right-left (need override) 0x00b4: 0x0664, # ARABIC-INDIC DIGIT FOUR, right-left (need override) 0x00b5: 0x0665, # ARABIC-INDIC DIGIT FIVE, right-left (need override) 0x00b6: 0x0666, # ARABIC-INDIC DIGIT SIX, right-left (need override) 0x00b7: 0x0667, # ARABIC-INDIC DIGIT SEVEN, right-left (need override) 0x00b8: 0x0668, # ARABIC-INDIC DIGIT EIGHT, right-left (need override) 0x00b9: 0x0669, # ARABIC-INDIC DIGIT NINE, right-left (need override) 0x00ba: 0x003a, # COLON, right-left 0x00bb: 0x061b, # ARABIC SEMICOLON 0x00bc: 0x003c, # LESS-THAN SIGN, right-left 0x00bd: 0x003d, # EQUALS SIGN, right-left 0x00be: 0x003e, # GREATER-THAN SIGN, right-left 0x00bf: 0x061f, # ARABIC QUESTION MARK <|fim▁hole|> 0x00c3: 0x0623, # ARABIC LETTER ALEF WITH HAMZA ABOVE 0x00c4: 0x0624, # ARABIC LETTER WAW WITH HAMZA ABOVE 0x00c5: 0x0625, # ARABIC LETTER ALEF WITH HAMZA BELOW 0x00c6: 0x0626, # ARABIC LETTER YEH WITH HAMZA ABOVE 0x00c7: 0x0627, # ARABIC LETTER ALEF 0x00c8: 0x0628, # ARABIC LETTER BEH 0x00c9: 0x0629, # ARABIC LETTER TEH MARBUTA 0x00ca: 0x062a, # ARABIC LETTER TEH 0x00cb: 0x062b, # ARABIC LETTER THEH 0x00cc: 0x062c, # ARABIC LETTER JEEM 0x00cd: 0x062d, # ARABIC LETTER HAH 0x00ce: 0x062e, # ARABIC LETTER KHAH 0x00cf: 0x062f, # ARABIC LETTER DAL 0x00d0: 0x0630, # ARABIC LETTER THAL 0x00d1: 0x0631, # ARABIC LETTER REH 0x00d2: 0x0632, # ARABIC LETTER ZAIN 0x00d3: 0x0633, # ARABIC LETTER SEEN 0x00d4: 0x0634, # ARABIC LETTER SHEEN 0x00d5: 0x0635, # ARABIC LETTER SAD 0x00d6: 0x0636, # ARABIC LETTER DAD 0x00d7: 0x0637, # ARABIC LETTER TAH 0x00d8: 0x0638, # ARABIC LETTER ZAH 0x00d9: 0x0639, # ARABIC LETTER AIN 0x00da: 0x063a, # ARABIC LETTER GHAIN 0x00db: 0x005b, # LEFT SQUARE BRACKET, right-left 0x00dc: 0x005c, # REVERSE SOLIDUS, right-left 0x00dd: 0x005d, # RIGHT SQUARE BRACKET, right-left 0x00de: 0x005e, # CIRCUMFLEX ACCENT, right-left 0x00df: 0x005f, # LOW LINE, right-left 0x00e0: 0x0640, # ARABIC TATWEEL 0x00e1: 0x0641, # ARABIC LETTER FEH 0x00e2: 0x0642, # ARABIC LETTER QAF 0x00e3: 0x0643, # ARABIC LETTER KAF 0x00e4: 0x0644, # ARABIC LETTER LAM 0x00e5: 0x0645, # ARABIC LETTER MEEM 0x00e6: 0x0646, # ARABIC LETTER NOON 0x00e7: 0x0647, # ARABIC LETTER HEH 0x00e8: 0x0648, # ARABIC LETTER WAW 0x00e9: 0x0649, # ARABIC LETTER ALEF MAKSURA 0x00ea: 0x064a, # ARABIC LETTER YEH 0x00eb: 0x064b, # ARABIC FATHATAN 0x00ec: 0x064c, # ARABIC DAMMATAN 0x00ed: 0x064d, # ARABIC KASRATAN 0x00ee: 0x064e, # ARABIC FATHA 0x00ef: 0x064f, # ARABIC DAMMA 0x00f0: 0x0650, # ARABIC KASRA 0x00f1: 0x0651, # ARABIC SHADDA 0x00f2: 0x0652, # ARABIC SUKUN 0x00f3: 0x067e, # ARABIC LETTER PEH 0x00f4: 0x0679, # ARABIC LETTER TTEH 0x00f5: 0x0686, # ARABIC LETTER TCHEH 0x00f6: 0x06d5, # ARABIC LETTER AE 0x00f7: 0x06a4, # ARABIC LETTER VEH 0x00f8: 0x06af, # ARABIC LETTER GAF 0x00f9: 0x0688, # ARABIC LETTER DDAL 0x00fa: 0x0691, # ARABIC LETTER RREH 0x00fb: 0x007b, # LEFT CURLY BRACKET, right-left 0x00fc: 0x007c, # VERTICAL LINE, right-left 0x00fd: 0x007d, # RIGHT CURLY BRACKET, right-left 0x00fe: 0x0698, # ARABIC LETTER JEH 0x00ff: 0x06d2, # ARABIC LETTER YEH BARREE }) ### Decoding Table decoding_table = ( '\x00' # 0x0000 -> CONTROL CHARACTER '\x01' # 0x0001 -> CONTROL CHARACTER '\x02' # 0x0002 -> CONTROL CHARACTER '\x03' # 0x0003 -> CONTROL CHARACTER '\x04' # 0x0004 -> CONTROL CHARACTER '\x05' # 0x0005 
-> CONTROL CHARACTER '\x06' # 0x0006 -> CONTROL CHARACTER '\x07' # 0x0007 -> CONTROL CHARACTER '\x08' # 0x0008 -> CONTROL CHARACTER '\t' # 0x0009 -> CONTROL CHARACTER '\n' # 0x000a -> CONTROL CHARACTER '\x0b' # 0x000b -> CONTROL CHARACTER '\x0c' # 0x000c -> CONTROL CHARACTER '\r' # 0x000d -> CONTROL CHARACTER '\x0e' # 0x000e -> CONTROL CHARACTER '\x0f' # 0x000f -> CONTROL CHARACTER '\x10' # 0x0010 -> CONTROL CHARACTER '\x11' # 0x0011 -> CONTROL CHARACTER '\x12' # 0x0012 -> CONTROL CHARACTER '\x13' # 0x0013 -> CONTROL CHARACTER '\x14' # 0x0014 -> CONTROL CHARACTER '\x15' # 0x0015 -> CONTROL CHARACTER '\x16' # 0x0016 -> CONTROL CHARACTER '\x17' # 0x0017 -> CONTROL CHARACTER '\x18' # 0x0018 -> CONTROL CHARACTER '\x19' # 0x0019 -> CONTROL CHARACTER '\x1a' # 0x001a -> CONTROL CHARACTER '\x1b' # 0x001b -> CONTROL CHARACTER '\x1c' # 0x001c -> CONTROL CHARACTER '\x1d' # 0x001d -> CONTROL CHARACTER '\x1e' # 0x001e -> CONTROL CHARACTER '\x1f' # 0x001f -> CONTROL CHARACTER ' ' # 0x0020 -> SPACE, left-right '!' # 0x0021 -> EXCLAMATION MARK, left-right '"' # 0x0022 -> QUOTATION MARK, left-right '#' # 0x0023 -> NUMBER SIGN, left-right '$' # 0x0024 -> DOLLAR SIGN, left-right '%' # 0x0025 -> PERCENT SIGN, left-right '&' # 0x0026 -> AMPERSAND, left-right "'" # 0x0027 -> APOSTROPHE, left-right '(' # 0x0028 -> LEFT PARENTHESIS, left-right ')' # 0x0029 -> RIGHT PARENTHESIS, left-right '*' # 0x002a -> ASTERISK, left-right '+' # 0x002b -> PLUS SIGN, left-right ',' # 0x002c -> COMMA, left-right; in Arabic-script context, displayed as 0x066C ARABIC THOUSANDS SEPARATOR '-' # 0x002d -> HYPHEN-MINUS, left-right '.' # 0x002e -> FULL STOP, left-right; in Arabic-script context, displayed as 0x066B ARABIC DECIMAL SEPARATOR '/' # 0x002f -> SOLIDUS, left-right '0' # 0x0030 -> DIGIT ZERO; in Arabic-script context, displayed as 0x0660 ARABIC-INDIC DIGIT ZERO '1' # 0x0031 -> DIGIT ONE; in Arabic-script context, displayed as 0x0661 ARABIC-INDIC DIGIT ONE '2' # 0x0032 -> DIGIT TWO; in Arabic-script context, displayed as 0x0662 ARABIC-INDIC DIGIT TWO '3' # 0x0033 -> DIGIT THREE; in Arabic-script context, displayed as 0x0663 ARABIC-INDIC DIGIT THREE '4' # 0x0034 -> DIGIT FOUR; in Arabic-script context, displayed as 0x0664 ARABIC-INDIC DIGIT FOUR '5' # 0x0035 -> DIGIT FIVE; in Arabic-script context, displayed as 0x0665 ARABIC-INDIC DIGIT FIVE '6' # 0x0036 -> DIGIT SIX; in Arabic-script context, displayed as 0x0666 ARABIC-INDIC DIGIT SIX '7' # 0x0037 -> DIGIT SEVEN; in Arabic-script context, displayed as 0x0667 ARABIC-INDIC DIGIT SEVEN '8' # 0x0038 -> DIGIT EIGHT; in Arabic-script context, displayed as 0x0668 ARABIC-INDIC DIGIT EIGHT '9' # 0x0039 -> DIGIT NINE; in Arabic-script context, displayed as 0x0669 ARABIC-INDIC DIGIT NINE ':' # 0x003a -> COLON, left-right ';' # 0x003b -> SEMICOLON, left-right '<' # 0x003c -> LESS-THAN SIGN, left-right '=' # 0x003d -> EQUALS SIGN, left-right '>' # 0x003e -> GREATER-THAN SIGN, left-right '?' 
# 0x003f -> QUESTION MARK, left-right '@' # 0x0040 -> COMMERCIAL AT 'A' # 0x0041 -> LATIN CAPITAL LETTER A 'B' # 0x0042 -> LATIN CAPITAL LETTER B 'C' # 0x0043 -> LATIN CAPITAL LETTER C 'D' # 0x0044 -> LATIN CAPITAL LETTER D 'E' # 0x0045 -> LATIN CAPITAL LETTER E 'F' # 0x0046 -> LATIN CAPITAL LETTER F 'G' # 0x0047 -> LATIN CAPITAL LETTER G 'H' # 0x0048 -> LATIN CAPITAL LETTER H 'I' # 0x0049 -> LATIN CAPITAL LETTER I 'J' # 0x004a -> LATIN CAPITAL LETTER J 'K' # 0x004b -> LATIN CAPITAL LETTER K 'L' # 0x004c -> LATIN CAPITAL LETTER L 'M' # 0x004d -> LATIN CAPITAL LETTER M 'N' # 0x004e -> LATIN CAPITAL LETTER N 'O' # 0x004f -> LATIN CAPITAL LETTER O 'P' # 0x0050 -> LATIN CAPITAL LETTER P 'Q' # 0x0051 -> LATIN CAPITAL LETTER Q 'R' # 0x0052 -> LATIN CAPITAL LETTER R 'S' # 0x0053 -> LATIN CAPITAL LETTER S 'T' # 0x0054 -> LATIN CAPITAL LETTER T 'U' # 0x0055 -> LATIN CAPITAL LETTER U 'V' # 0x0056 -> LATIN CAPITAL LETTER V 'W' # 0x0057 -> LATIN CAPITAL LETTER W 'X' # 0x0058 -> LATIN CAPITAL LETTER X 'Y' # 0x0059 -> LATIN CAPITAL LETTER Y 'Z' # 0x005a -> LATIN CAPITAL LETTER Z '[' # 0x005b -> LEFT SQUARE BRACKET, left-right '\\' # 0x005c -> REVERSE SOLIDUS, left-right ']' # 0x005d -> RIGHT SQUARE BRACKET, left-right '^' # 0x005e -> CIRCUMFLEX ACCENT, left-right '_' # 0x005f -> LOW LINE, left-right '`' # 0x0060 -> GRAVE ACCENT 'a' # 0x0061 -> LATIN SMALL LETTER A 'b' # 0x0062 -> LATIN SMALL LETTER B 'c' # 0x0063 -> LATIN SMALL LETTER C 'd' # 0x0064 -> LATIN SMALL LETTER D 'e' # 0x0065 -> LATIN SMALL LETTER E 'f' # 0x0066 -> LATIN SMALL LETTER F 'g' # 0x0067 -> LATIN SMALL LETTER G 'h' # 0x0068 -> LATIN SMALL LETTER H 'i' # 0x0069 -> LATIN SMALL LETTER I 'j' # 0x006a -> LATIN SMALL LETTER J 'k' # 0x006b -> LATIN SMALL LETTER K 'l' # 0x006c -> LATIN SMALL LETTER L 'm' # 0x006d -> LATIN SMALL LETTER M 'n' # 0x006e -> LATIN SMALL LETTER N 'o' # 0x006f -> LATIN SMALL LETTER O 'p' # 0x0070 -> LATIN SMALL LETTER P 'q' # 0x0071 -> LATIN SMALL LETTER Q 'r' # 0x0072 -> LATIN SMALL LETTER R 's' # 0x0073 -> LATIN SMALL LETTER S 't' # 0x0074 -> LATIN SMALL LETTER T 'u' # 0x0075 -> LATIN SMALL LETTER U 'v' # 0x0076 -> LATIN SMALL LETTER V 'w' # 0x0077 -> LATIN SMALL LETTER W 'x' # 0x0078 -> LATIN SMALL LETTER X 'y' # 0x0079 -> LATIN SMALL LETTER Y 'z' # 0x007a -> LATIN SMALL LETTER Z '{' # 0x007b -> LEFT CURLY BRACKET, left-right '|' # 0x007c -> VERTICAL LINE, left-right '}' # 0x007d -> RIGHT CURLY BRACKET, left-right '~' # 0x007e -> TILDE '\x7f' # 0x007f -> CONTROL CHARACTER '\xc4' # 0x0080 -> LATIN CAPITAL LETTER A WITH DIAERESIS '\xa0' # 0x0081 -> NO-BREAK SPACE, right-left '\xc7' # 0x0082 -> LATIN CAPITAL LETTER C WITH CEDILLA '\xc9' # 0x0083 -> LATIN CAPITAL LETTER E WITH ACUTE '\xd1' # 0x0084 -> LATIN CAPITAL LETTER N WITH TILDE '\xd6' # 0x0085 -> LATIN CAPITAL LETTER O WITH DIAERESIS '\xdc' # 0x0086 -> LATIN CAPITAL LETTER U WITH DIAERESIS '\xe1' # 0x0087 -> LATIN SMALL LETTER A WITH ACUTE '\xe0' # 0x0088 -> LATIN SMALL LETTER A WITH GRAVE '\xe2' # 0x0089 -> LATIN SMALL LETTER A WITH CIRCUMFLEX '\xe4' # 0x008a -> LATIN SMALL LETTER A WITH DIAERESIS '\u06ba' # 0x008b -> ARABIC LETTER NOON GHUNNA '\xab' # 0x008c -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left '\xe7' # 0x008d -> LATIN SMALL LETTER C WITH CEDILLA '\xe9' # 0x008e -> LATIN SMALL LETTER E WITH ACUTE '\xe8' # 0x008f -> LATIN SMALL LETTER E WITH GRAVE '\xea' # 0x0090 -> LATIN SMALL LETTER E WITH CIRCUMFLEX '\xeb' # 0x0091 -> LATIN SMALL LETTER E WITH DIAERESIS '\xed' # 0x0092 -> LATIN SMALL LETTER I WITH ACUTE '\u2026' # 0x0093 -> HORIZONTAL 
ELLIPSIS, right-left '\xee' # 0x0094 -> LATIN SMALL LETTER I WITH CIRCUMFLEX '\xef' # 0x0095 -> LATIN SMALL LETTER I WITH DIAERESIS '\xf1' # 0x0096 -> LATIN SMALL LETTER N WITH TILDE '\xf3' # 0x0097 -> LATIN SMALL LETTER O WITH ACUTE '\xbb' # 0x0098 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left '\xf4' # 0x0099 -> LATIN SMALL LETTER O WITH CIRCUMFLEX '\xf6' # 0x009a -> LATIN SMALL LETTER O WITH DIAERESIS '\xf7' # 0x009b -> DIVISION SIGN, right-left '\xfa' # 0x009c -> LATIN SMALL LETTER U WITH ACUTE '\xf9' # 0x009d -> LATIN SMALL LETTER U WITH GRAVE '\xfb' # 0x009e -> LATIN SMALL LETTER U WITH CIRCUMFLEX '\xfc' # 0x009f -> LATIN SMALL LETTER U WITH DIAERESIS ' ' # 0x00a0 -> SPACE, right-left '!' # 0x00a1 -> EXCLAMATION MARK, right-left '"' # 0x00a2 -> QUOTATION MARK, right-left '#' # 0x00a3 -> NUMBER SIGN, right-left '$' # 0x00a4 -> DOLLAR SIGN, right-left '\u066a' # 0x00a5 -> ARABIC PERCENT SIGN '&' # 0x00a6 -> AMPERSAND, right-left "'" # 0x00a7 -> APOSTROPHE, right-left '(' # 0x00a8 -> LEFT PARENTHESIS, right-left ')' # 0x00a9 -> RIGHT PARENTHESIS, right-left '*' # 0x00aa -> ASTERISK, right-left '+' # 0x00ab -> PLUS SIGN, right-left '\u060c' # 0x00ac -> ARABIC COMMA '-' # 0x00ad -> HYPHEN-MINUS, right-left '.' # 0x00ae -> FULL STOP, right-left '/' # 0x00af -> SOLIDUS, right-left '\u0660' # 0x00b0 -> ARABIC-INDIC DIGIT ZERO, right-left (need override) '\u0661' # 0x00b1 -> ARABIC-INDIC DIGIT ONE, right-left (need override) '\u0662' # 0x00b2 -> ARABIC-INDIC DIGIT TWO, right-left (need override) '\u0663' # 0x00b3 -> ARABIC-INDIC DIGIT THREE, right-left (need override) '\u0664' # 0x00b4 -> ARABIC-INDIC DIGIT FOUR, right-left (need override) '\u0665' # 0x00b5 -> ARABIC-INDIC DIGIT FIVE, right-left (need override) '\u0666' # 0x00b6 -> ARABIC-INDIC DIGIT SIX, right-left (need override) '\u0667' # 0x00b7 -> ARABIC-INDIC DIGIT SEVEN, right-left (need override) '\u0668' # 0x00b8 -> ARABIC-INDIC DIGIT EIGHT, right-left (need override) '\u0669' # 0x00b9 -> ARABIC-INDIC DIGIT NINE, right-left (need override) ':' # 0x00ba -> COLON, right-left '\u061b' # 0x00bb -> ARABIC SEMICOLON '<' # 0x00bc -> LESS-THAN SIGN, right-left '=' # 0x00bd -> EQUALS SIGN, right-left '>' # 0x00be -> GREATER-THAN SIGN, right-left '\u061f' # 0x00bf -> ARABIC QUESTION MARK '\u274a' # 0x00c0 -> EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left '\u0621' # 0x00c1 -> ARABIC LETTER HAMZA '\u0622' # 0x00c2 -> ARABIC LETTER ALEF WITH MADDA ABOVE '\u0623' # 0x00c3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE '\u0624' # 0x00c4 -> ARABIC LETTER WAW WITH HAMZA ABOVE '\u0625' # 0x00c5 -> ARABIC LETTER ALEF WITH HAMZA BELOW '\u0626' # 0x00c6 -> ARABIC LETTER YEH WITH HAMZA ABOVE '\u0627' # 0x00c7 -> ARABIC LETTER ALEF '\u0628' # 0x00c8 -> ARABIC LETTER BEH '\u0629' # 0x00c9 -> ARABIC LETTER TEH MARBUTA '\u062a' # 0x00ca -> ARABIC LETTER TEH '\u062b' # 0x00cb -> ARABIC LETTER THEH '\u062c' # 0x00cc -> ARABIC LETTER JEEM '\u062d' # 0x00cd -> ARABIC LETTER HAH '\u062e' # 0x00ce -> ARABIC LETTER KHAH '\u062f' # 0x00cf -> ARABIC LETTER DAL '\u0630' # 0x00d0 -> ARABIC LETTER THAL '\u0631' # 0x00d1 -> ARABIC LETTER REH '\u0632' # 0x00d2 -> ARABIC LETTER ZAIN '\u0633' # 0x00d3 -> ARABIC LETTER SEEN '\u0634' # 0x00d4 -> ARABIC LETTER SHEEN '\u0635' # 0x00d5 -> ARABIC LETTER SAD '\u0636' # 0x00d6 -> ARABIC LETTER DAD '\u0637' # 0x00d7 -> ARABIC LETTER TAH '\u0638' # 0x00d8 -> ARABIC LETTER ZAH '\u0639' # 0x00d9 -> ARABIC LETTER AIN '\u063a' # 0x00da -> ARABIC LETTER GHAIN '[' # 0x00db -> LEFT SQUARE BRACKET, right-left '\\' # 0x00dc -> REVERSE 
SOLIDUS, right-left ']' # 0x00dd -> RIGHT SQUARE BRACKET, right-left '^' # 0x00de -> CIRCUMFLEX ACCENT, right-left '_' # 0x00df -> LOW LINE, right-left '\u0640' # 0x00e0 -> ARABIC TATWEEL '\u0641' # 0x00e1 -> ARABIC LETTER FEH '\u0642' # 0x00e2 -> ARABIC LETTER QAF '\u0643' # 0x00e3 -> ARABIC LETTER KAF '\u0644' # 0x00e4 -> ARABIC LETTER LAM '\u0645' # 0x00e5 -> ARABIC LETTER MEEM '\u0646' # 0x00e6 -> ARABIC LETTER NOON '\u0647' # 0x00e7 -> ARABIC LETTER HEH '\u0648' # 0x00e8 -> ARABIC LETTER WAW '\u0649' # 0x00e9 -> ARABIC LETTER ALEF MAKSURA '\u064a' # 0x00ea -> ARABIC LETTER YEH '\u064b' # 0x00eb -> ARABIC FATHATAN '\u064c' # 0x00ec -> ARABIC DAMMATAN '\u064d' # 0x00ed -> ARABIC KASRATAN '\u064e' # 0x00ee -> ARABIC FATHA '\u064f' # 0x00ef -> ARABIC DAMMA '\u0650' # 0x00f0 -> ARABIC KASRA '\u0651' # 0x00f1 -> ARABIC SHADDA '\u0652' # 0x00f2 -> ARABIC SUKUN '\u067e' # 0x00f3 -> ARABIC LETTER PEH '\u0679' # 0x00f4 -> ARABIC LETTER TTEH '\u0686' # 0x00f5 -> ARABIC LETTER TCHEH '\u06d5' # 0x00f6 -> ARABIC LETTER AE '\u06a4' # 0x00f7 -> ARABIC LETTER VEH '\u06af' # 0x00f8 -> ARABIC LETTER GAF '\u0688' # 0x00f9 -> ARABIC LETTER DDAL '\u0691' # 0x00fa -> ARABIC LETTER RREH '{' # 0x00fb -> LEFT CURLY BRACKET, right-left '|' # 0x00fc -> VERTICAL LINE, right-left '}' # 0x00fd -> RIGHT CURLY BRACKET, right-left '\u0698' # 0x00fe -> ARABIC LETTER JEH '\u06d2' # 0x00ff -> ARABIC LETTER YEH BARREE ) ### Encoding Map encoding_map = { 0x0000: 0x0000, # CONTROL CHARACTER 0x0001: 0x0001, # CONTROL CHARACTER 0x0002: 0x0002, # CONTROL CHARACTER 0x0003: 0x0003, # CONTROL CHARACTER 0x0004: 0x0004, # CONTROL CHARACTER 0x0005: 0x0005, # CONTROL CHARACTER 0x0006: 0x0006, # CONTROL CHARACTER 0x0007: 0x0007, # CONTROL CHARACTER 0x0008: 0x0008, # CONTROL CHARACTER 0x0009: 0x0009, # CONTROL CHARACTER 0x000a: 0x000a, # CONTROL CHARACTER 0x000b: 0x000b, # CONTROL CHARACTER 0x000c: 0x000c, # CONTROL CHARACTER 0x000d: 0x000d, # CONTROL CHARACTER 0x000e: 0x000e, # CONTROL CHARACTER 0x000f: 0x000f, # CONTROL CHARACTER 0x0010: 0x0010, # CONTROL CHARACTER 0x0011: 0x0011, # CONTROL CHARACTER 0x0012: 0x0012, # CONTROL CHARACTER 0x0013: 0x0013, # CONTROL CHARACTER 0x0014: 0x0014, # CONTROL CHARACTER 0x0015: 0x0015, # CONTROL CHARACTER 0x0016: 0x0016, # CONTROL CHARACTER 0x0017: 0x0017, # CONTROL CHARACTER 0x0018: 0x0018, # CONTROL CHARACTER 0x0019: 0x0019, # CONTROL CHARACTER 0x001a: 0x001a, # CONTROL CHARACTER 0x001b: 0x001b, # CONTROL CHARACTER 0x001c: 0x001c, # CONTROL CHARACTER 0x001d: 0x001d, # CONTROL CHARACTER 0x001e: 0x001e, # CONTROL CHARACTER 0x001f: 0x001f, # CONTROL CHARACTER 0x0020: 0x0020, # SPACE, left-right 0x0020: 0x00a0, # SPACE, right-left 0x0021: 0x0021, # EXCLAMATION MARK, left-right 0x0021: 0x00a1, # EXCLAMATION MARK, right-left 0x0022: 0x0022, # QUOTATION MARK, left-right 0x0022: 0x00a2, # QUOTATION MARK, right-left 0x0023: 0x0023, # NUMBER SIGN, left-right 0x0023: 0x00a3, # NUMBER SIGN, right-left 0x0024: 0x0024, # DOLLAR SIGN, left-right 0x0024: 0x00a4, # DOLLAR SIGN, right-left 0x0025: 0x0025, # PERCENT SIGN, left-right 0x0026: 0x0026, # AMPERSAND, left-right 0x0026: 0x00a6, # AMPERSAND, right-left 0x0027: 0x0027, # APOSTROPHE, left-right 0x0027: 0x00a7, # APOSTROPHE, right-left 0x0028: 0x0028, # LEFT PARENTHESIS, left-right 0x0028: 0x00a8, # LEFT PARENTHESIS, right-left 0x0029: 0x0029, # RIGHT PARENTHESIS, left-right 0x0029: 0x00a9, # RIGHT PARENTHESIS, right-left 0x002a: 0x002a, # ASTERISK, left-right 0x002a: 0x00aa, # ASTERISK, right-left 0x002b: 0x002b, # PLUS SIGN, left-right 0x002b: 0x00ab, # 
PLUS SIGN, right-left 0x002c: 0x002c, # COMMA, left-right; in Arabic-script context, displayed as 0x066C ARABIC THOUSANDS SEPARATOR 0x002d: 0x002d, # HYPHEN-MINUS, left-right 0x002d: 0x00ad, # HYPHEN-MINUS, right-left 0x002e: 0x002e, # FULL STOP, left-right; in Arabic-script context, displayed as 0x066B ARABIC DECIMAL SEPARATOR 0x002e: 0x00ae, # FULL STOP, right-left 0x002f: 0x002f, # SOLIDUS, left-right 0x002f: 0x00af, # SOLIDUS, right-left 0x0030: 0x0030, # DIGIT ZERO; in Arabic-script context, displayed as 0x0660 ARABIC-INDIC DIGIT ZERO 0x0031: 0x0031, # DIGIT ONE; in Arabic-script context, displayed as 0x0661 ARABIC-INDIC DIGIT ONE 0x0032: 0x0032, # DIGIT TWO; in Arabic-script context, displayed as 0x0662 ARABIC-INDIC DIGIT TWO 0x0033: 0x0033, # DIGIT THREE; in Arabic-script context, displayed as 0x0663 ARABIC-INDIC DIGIT THREE 0x0034: 0x0034, # DIGIT FOUR; in Arabic-script context, displayed as 0x0664 ARABIC-INDIC DIGIT FOUR 0x0035: 0x0035, # DIGIT FIVE; in Arabic-script context, displayed as 0x0665 ARABIC-INDIC DIGIT FIVE 0x0036: 0x0036, # DIGIT SIX; in Arabic-script context, displayed as 0x0666 ARABIC-INDIC DIGIT SIX 0x0037: 0x0037, # DIGIT SEVEN; in Arabic-script context, displayed as 0x0667 ARABIC-INDIC DIGIT SEVEN 0x0038: 0x0038, # DIGIT EIGHT; in Arabic-script context, displayed as 0x0668 ARABIC-INDIC DIGIT EIGHT 0x0039: 0x0039, # DIGIT NINE; in Arabic-script context, displayed as 0x0669 ARABIC-INDIC DIGIT NINE 0x003a: 0x003a, # COLON, left-right 0x003a: 0x00ba, # COLON, right-left 0x003b: 0x003b, # SEMICOLON, left-right 0x003c: 0x003c, # LESS-THAN SIGN, left-right 0x003c: 0x00bc, # LESS-THAN SIGN, right-left 0x003d: 0x003d, # EQUALS SIGN, left-right 0x003d: 0x00bd, # EQUALS SIGN, right-left 0x003e: 0x003e, # GREATER-THAN SIGN, left-right 0x003e: 0x00be, # GREATER-THAN SIGN, right-left 0x003f: 0x003f, # QUESTION MARK, left-right 0x0040: 0x0040, # COMMERCIAL AT 0x0041: 0x0041, # LATIN CAPITAL LETTER A 0x0042: 0x0042, # LATIN CAPITAL LETTER B 0x0043: 0x0043, # LATIN CAPITAL LETTER C 0x0044: 0x0044, # LATIN CAPITAL LETTER D 0x0045: 0x0045, # LATIN CAPITAL LETTER E 0x0046: 0x0046, # LATIN CAPITAL LETTER F 0x0047: 0x0047, # LATIN CAPITAL LETTER G 0x0048: 0x0048, # LATIN CAPITAL LETTER H 0x0049: 0x0049, # LATIN CAPITAL LETTER I 0x004a: 0x004a, # LATIN CAPITAL LETTER J 0x004b: 0x004b, # LATIN CAPITAL LETTER K 0x004c: 0x004c, # LATIN CAPITAL LETTER L 0x004d: 0x004d, # LATIN CAPITAL LETTER M 0x004e: 0x004e, # LATIN CAPITAL LETTER N 0x004f: 0x004f, # LATIN CAPITAL LETTER O 0x0050: 0x0050, # LATIN CAPITAL LETTER P 0x0051: 0x0051, # LATIN CAPITAL LETTER Q 0x0052: 0x0052, # LATIN CAPITAL LETTER R 0x0053: 0x0053, # LATIN CAPITAL LETTER S 0x0054: 0x0054, # LATIN CAPITAL LETTER T 0x0055: 0x0055, # LATIN CAPITAL LETTER U 0x0056: 0x0056, # LATIN CAPITAL LETTER V 0x0057: 0x0057, # LATIN CAPITAL LETTER W 0x0058: 0x0058, # LATIN CAPITAL LETTER X 0x0059: 0x0059, # LATIN CAPITAL LETTER Y 0x005a: 0x005a, # LATIN CAPITAL LETTER Z 0x005b: 0x005b, # LEFT SQUARE BRACKET, left-right 0x005b: 0x00db, # LEFT SQUARE BRACKET, right-left 0x005c: 0x005c, # REVERSE SOLIDUS, left-right 0x005c: 0x00dc, # REVERSE SOLIDUS, right-left 0x005d: 0x005d, # RIGHT SQUARE BRACKET, left-right 0x005d: 0x00dd, # RIGHT SQUARE BRACKET, right-left 0x005e: 0x005e, # CIRCUMFLEX ACCENT, left-right 0x005e: 0x00de, # CIRCUMFLEX ACCENT, right-left 0x005f: 0x005f, # LOW LINE, left-right 0x005f: 0x00df, # LOW LINE, right-left 0x0060: 0x0060, # GRAVE ACCENT 0x0061: 0x0061, # LATIN SMALL LETTER A 0x0062: 0x0062, # LATIN SMALL LETTER B 0x0063: 
0x0063, # LATIN SMALL LETTER C 0x0064: 0x0064, # LATIN SMALL LETTER D 0x0065: 0x0065, # LATIN SMALL LETTER E 0x0066: 0x0066, # LATIN SMALL LETTER F 0x0067: 0x0067, # LATIN SMALL LETTER G 0x0068: 0x0068, # LATIN SMALL LETTER H 0x0069: 0x0069, # LATIN SMALL LETTER I 0x006a: 0x006a, # LATIN SMALL LETTER J 0x006b: 0x006b, # LATIN SMALL LETTER K 0x006c: 0x006c, # LATIN SMALL LETTER L 0x006d: 0x006d, # LATIN SMALL LETTER M 0x006e: 0x006e, # LATIN SMALL LETTER N 0x006f: 0x006f, # LATIN SMALL LETTER O 0x0070: 0x0070, # LATIN SMALL LETTER P 0x0071: 0x0071, # LATIN SMALL LETTER Q 0x0072: 0x0072, # LATIN SMALL LETTER R 0x0073: 0x0073, # LATIN SMALL LETTER S 0x0074: 0x0074, # LATIN SMALL LETTER T 0x0075: 0x0075, # LATIN SMALL LETTER U 0x0076: 0x0076, # LATIN SMALL LETTER V 0x0077: 0x0077, # LATIN SMALL LETTER W 0x0078: 0x0078, # LATIN SMALL LETTER X 0x0079: 0x0079, # LATIN SMALL LETTER Y 0x007a: 0x007a, # LATIN SMALL LETTER Z 0x007b: 0x007b, # LEFT CURLY BRACKET, left-right 0x007b: 0x00fb, # LEFT CURLY BRACKET, right-left 0x007c: 0x007c, # VERTICAL LINE, left-right 0x007c: 0x00fc, # VERTICAL LINE, right-left 0x007d: 0x007d, # RIGHT CURLY BRACKET, left-right 0x007d: 0x00fd, # RIGHT CURLY BRACKET, right-left 0x007e: 0x007e, # TILDE 0x007f: 0x007f, # CONTROL CHARACTER 0x00a0: 0x0081, # NO-BREAK SPACE, right-left 0x00ab: 0x008c, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left 0x00bb: 0x0098, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left 0x00c4: 0x0080, # LATIN CAPITAL LETTER A WITH DIAERESIS 0x00c7: 0x0082, # LATIN CAPITAL LETTER C WITH CEDILLA 0x00c9: 0x0083, # LATIN CAPITAL LETTER E WITH ACUTE 0x00d1: 0x0084, # LATIN CAPITAL LETTER N WITH TILDE 0x00d6: 0x0085, # LATIN CAPITAL LETTER O WITH DIAERESIS 0x00dc: 0x0086, # LATIN CAPITAL LETTER U WITH DIAERESIS 0x00e0: 0x0088, # LATIN SMALL LETTER A WITH GRAVE 0x00e1: 0x0087, # LATIN SMALL LETTER A WITH ACUTE 0x00e2: 0x0089, # LATIN SMALL LETTER A WITH CIRCUMFLEX 0x00e4: 0x008a, # LATIN SMALL LETTER A WITH DIAERESIS 0x00e7: 0x008d, # LATIN SMALL LETTER C WITH CEDILLA 0x00e8: 0x008f, # LATIN SMALL LETTER E WITH GRAVE 0x00e9: 0x008e, # LATIN SMALL LETTER E WITH ACUTE 0x00ea: 0x0090, # LATIN SMALL LETTER E WITH CIRCUMFLEX 0x00eb: 0x0091, # LATIN SMALL LETTER E WITH DIAERESIS 0x00ed: 0x0092, # LATIN SMALL LETTER I WITH ACUTE 0x00ee: 0x0094, # LATIN SMALL LETTER I WITH CIRCUMFLEX 0x00ef: 0x0095, # LATIN SMALL LETTER I WITH DIAERESIS 0x00f1: 0x0096, # LATIN SMALL LETTER N WITH TILDE 0x00f3: 0x0097, # LATIN SMALL LETTER O WITH ACUTE 0x00f4: 0x0099, # LATIN SMALL LETTER O WITH CIRCUMFLEX 0x00f6: 0x009a, # LATIN SMALL LETTER O WITH DIAERESIS 0x00f7: 0x009b, # DIVISION SIGN, right-left 0x00f9: 0x009d, # LATIN SMALL LETTER U WITH GRAVE 0x00fa: 0x009c, # LATIN SMALL LETTER U WITH ACUTE 0x00fb: 0x009e, # LATIN SMALL LETTER U WITH CIRCUMFLEX 0x00fc: 0x009f, # LATIN SMALL LETTER U WITH DIAERESIS 0x060c: 0x00ac, # ARABIC COMMA 0x061b: 0x00bb, # ARABIC SEMICOLON 0x061f: 0x00bf, # ARABIC QUESTION MARK 0x0621: 0x00c1, # ARABIC LETTER HAMZA 0x0622: 0x00c2, # ARABIC LETTER ALEF WITH MADDA ABOVE 0x0623: 0x00c3, # ARABIC LETTER ALEF WITH HAMZA ABOVE 0x0624: 0x00c4, # ARABIC LETTER WAW WITH HAMZA ABOVE 0x0625: 0x00c5, # ARABIC LETTER ALEF WITH HAMZA BELOW 0x0626: 0x00c6, # ARABIC LETTER YEH WITH HAMZA ABOVE 0x0627: 0x00c7, # ARABIC LETTER ALEF 0x0628: 0x00c8, # ARABIC LETTER BEH 0x0629: 0x00c9, # ARABIC LETTER TEH MARBUTA 0x062a: 0x00ca, # ARABIC LETTER TEH 0x062b: 0x00cb, # ARABIC LETTER THEH 0x062c: 0x00cc, # ARABIC LETTER JEEM 0x062d: 0x00cd, # ARABIC LETTER HAH 
0x062e: 0x00ce, # ARABIC LETTER KHAH 0x062f: 0x00cf, # ARABIC LETTER DAL 0x0630: 0x00d0, # ARABIC LETTER THAL 0x0631: 0x00d1, # ARABIC LETTER REH 0x0632: 0x00d2, # ARABIC LETTER ZAIN 0x0633: 0x00d3, # ARABIC LETTER SEEN 0x0634: 0x00d4, # ARABIC LETTER SHEEN 0x0635: 0x00d5, # ARABIC LETTER SAD 0x0636: 0x00d6, # ARABIC LETTER DAD 0x0637: 0x00d7, # ARABIC LETTER TAH 0x0638: 0x00d8, # ARABIC LETTER ZAH 0x0639: 0x00d9, # ARABIC LETTER AIN 0x063a: 0x00da, # ARABIC LETTER GHAIN 0x0640: 0x00e0, # ARABIC TATWEEL 0x0641: 0x00e1, # ARABIC LETTER FEH 0x0642: 0x00e2, # ARABIC LETTER QAF 0x0643: 0x00e3, # ARABIC LETTER KAF 0x0644: 0x00e4, # ARABIC LETTER LAM 0x0645: 0x00e5, # ARABIC LETTER MEEM 0x0646: 0x00e6, # ARABIC LETTER NOON 0x0647: 0x00e7, # ARABIC LETTER HEH 0x0648: 0x00e8, # ARABIC LETTER WAW 0x0649: 0x00e9, # ARABIC LETTER ALEF MAKSURA 0x064a: 0x00ea, # ARABIC LETTER YEH 0x064b: 0x00eb, # ARABIC FATHATAN 0x064c: 0x00ec, # ARABIC DAMMATAN 0x064d: 0x00ed, # ARABIC KASRATAN 0x064e: 0x00ee, # ARABIC FATHA 0x064f: 0x00ef, # ARABIC DAMMA 0x0650: 0x00f0, # ARABIC KASRA 0x0651: 0x00f1, # ARABIC SHADDA 0x0652: 0x00f2, # ARABIC SUKUN 0x0660: 0x00b0, # ARABIC-INDIC DIGIT ZERO, right-left (need override) 0x0661: 0x00b1, # ARABIC-INDIC DIGIT ONE, right-left (need override) 0x0662: 0x00b2, # ARABIC-INDIC DIGIT TWO, right-left (need override) 0x0663: 0x00b3, # ARABIC-INDIC DIGIT THREE, right-left (need override) 0x0664: 0x00b4, # ARABIC-INDIC DIGIT FOUR, right-left (need override) 0x0665: 0x00b5, # ARABIC-INDIC DIGIT FIVE, right-left (need override) 0x0666: 0x00b6, # ARABIC-INDIC DIGIT SIX, right-left (need override) 0x0667: 0x00b7, # ARABIC-INDIC DIGIT SEVEN, right-left (need override) 0x0668: 0x00b8, # ARABIC-INDIC DIGIT EIGHT, right-left (need override) 0x0669: 0x00b9, # ARABIC-INDIC DIGIT NINE, right-left (need override) 0x066a: 0x00a5, # ARABIC PERCENT SIGN 0x0679: 0x00f4, # ARABIC LETTER TTEH 0x067e: 0x00f3, # ARABIC LETTER PEH 0x0686: 0x00f5, # ARABIC LETTER TCHEH 0x0688: 0x00f9, # ARABIC LETTER DDAL 0x0691: 0x00fa, # ARABIC LETTER RREH 0x0698: 0x00fe, # ARABIC LETTER JEH 0x06a4: 0x00f7, # ARABIC LETTER VEH 0x06af: 0x00f8, # ARABIC LETTER GAF 0x06ba: 0x008b, # ARABIC LETTER NOON GHUNNA 0x06d2: 0x00ff, # ARABIC LETTER YEH BARREE 0x06d5: 0x00f6, # ARABIC LETTER AE 0x2026: 0x0093, # HORIZONTAL ELLIPSIS, right-left 0x274a: 0x00c0, # EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left }<|fim▁end|>
0x00c0: 0x274a, # EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
0x00c1: 0x0621, # ARABIC LETTER HAMZA
0x00c2: 0x0622, # ARABIC LETTER ALEF WITH MADDA ABOVE
<|file_name|>messenger_contact_add.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright (C) 2007 Johann Prieur <johann.prieur@gmail.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # from papyon.service.AddressBook.scenario.base import BaseScenario from papyon.service.AddressBook.scenario.base import Scenario from papyon.service.description.AB.constants import ContactEmailType from papyon.profile import ContactType, NetworkID <|fim▁hole|> account='', network_id=NetworkID.MSN, contact_type=ContactType.REGULAR, contact_info={}, invite_display_name='', invite_message=''): """Adds a messenger contact and updates the address book. @param ab: the address book service @param callback: tuple(callable, *args) @param errback: tuple(callable, *args)""" BaseScenario.__init__(self, Scenario.CONTACT_SAVE, callback, errback) self._ab = ab self.account = account self.network_id = network_id self.contact_type = contact_type self.contact_info = contact_info self.invite_display_name = invite_display_name self.invite_message = invite_message self.auto_manage_allow_list = True def execute(self): invite_info = { 'display_name' : self.invite_display_name, 'invite_message' : self.invite_message } if self.network_id == NetworkID.MSN: self.contact_info['passport_name'] = self.account self.contact_info['contact_type'] = self.contact_type self.contact_info['is_messenger_user'] = True elif self.network_id == NetworkID.EXTERNAL: self.contact_info.setdefault('email', {})[ContactEmailType.EXTERNAL] = self.account self.contact_info['capability'] = self.network_id else: raise NotImplementedError("Network ID '%s' is not implemented" % self.network_id) self._ab.ContactAdd(self._callback, self._errback, self._scenario, self.contact_info, invite_info, self.auto_manage_allow_list)<|fim▁end|>
__all__ = ['MessengerContactAddScenario']

class MessengerContactAddScenario(BaseScenario):
    def __init__(self, ab, callback, errback,
<|file_name|>ArcherCommand.java<|end_file_name|><|fim▁begin|>package com.iluwatar.front.controller; /** * * Command for archers. *<|fim▁hole|> */ public class ArcherCommand implements Command { @Override public void process() { new ArcherView().display(); } }<|fim▁end|>
<|file_name|>operations.hpp<|end_file_name|><|fim▁begin|>#ifndef _OPERATIONS_HPP #define _OPERATIONS_HPP #define CL_USE_DEPRECATED_OPENCL_1_1_APIS #define CL_USE_DEPRECATED_OPENCL_1_2_APIS #include <CL/cl.h> #include "matrix.hpp"<|fim▁hole|>public: virtual ~Operations(void) { } virtual Matrix multiply(const Matrix& lhs, const Matrix& rhs) const = 0; }; class CpuOperations : public Operations { public: virtual Matrix multiply(const Matrix& lhs, const Matrix& rhs) const; }; class GpuOperations : public Operations { private: cl_device_id m_deviceId; public: GpuOperations(void); virtual Matrix multiply(const Matrix& lhs, const Matrix& rhs) const; protected: GpuOperations(std::string kernelFile); cl_context createContext(void) const; cl_command_queue createCommandQueue(cl_context context) const; cl_program buildProgram(cl_context context, const std::string& filename) const; cl_kernel createKernel(cl_context context, cl_program program, const std::string& name) const; cl_mem uploadBuffer(cl_context context, size_t size, const void* dataPtr) const; static cl_device_id selectDevice(void); CleanUp<cl_context> m_context; CleanUp<cl_command_queue> m_queue; CleanUp<cl_program> m_program; }; class TransposedGpuOperations : public GpuOperations { public: TransposedGpuOperations(void); virtual Matrix multiply(const Matrix& lhs, const Matrix& rhs) const; }; class DotGpuOperations : public GpuOperations { public: DotGpuOperations(void) : GpuOperations("matrix_mul_dot.cl") {} }; class Float4GpuOperations : public GpuOperations { public: Float4GpuOperations(void) : GpuOperations("matrix_mul_float4.cl") {} }; class ConstantGpuOperations : public GpuOperations { public: ConstantGpuOperations(void) : GpuOperations("matrix_mul_constant.cl") {} }; #endif // _OPERATIONS_HPP<|fim▁end|>
#include "mem.hpp"

class Operations {
<|file_name|>import_mr_sessions_stroop.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ## ## See COPYING file distributed along with the ncanda-data-integration package ## for the copyright and license terms ## from __future__ import print_function from builtins import str import os import re import tempfile import shutil from sibispy import sibislogger as slog from sibispy import utils as sutils<|fim▁hole|># Check for Stroop data (ePrime log file) in given XNAT session # import_bindir = os.path.join( os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) ), 'import', 'laptops' ) bindir = os.path.dirname( os.path.abspath(__file__) ) # Check a list of experiments for ePrime Stroop files def check_for_stroop( xnat, xnat_eid_list, verbose=False ): stroop_files = [] if verbose : print("check_for_stroop: " + str(xnat_eid_list)) for xnat_eid in xnat_eid_list: experiment = xnat.select.experiments[ xnat_eid ] # Get list of resource files that match the Stroop file name pattern for resource in list(experiment.resources): resource_files = xnat._get_json( '/data/experiments/%s/resources/%s/files' % ( xnat_eid, resource ) ); stroop_files += [ (xnat_eid, resource, re.sub( '.*\/files\/', '', file['URI']) ) for file in resource_files if re.match( '^NCANDAStroopMtS_3cycles_7m53stask_.*.txt$', file['Name'] ) ] # No matching files - nothing to do if len( stroop_files ) == 0: if verbose : print("check_for_stroop: no stroop") return (None, None, None) # Get first file from list, warn if more files if len( stroop_files ) > 1: error = "ERROR: experiment have/has more than one Stroop .txt file. Please make sure there is exactly one per session." for xnat_eid in xnat_eid_list: slog.info(xnat_eid,error) return (None, None, None) if verbose : print("check_for_stroop: Stroop File: " + str(stroop_files[0])) return stroop_files[0] # Import a Stroop file into REDCap after scoring def import_stroop_to_redcap( xnat, stroop_eid, stroop_resource, stroop_file, \ redcap_key, verbose=False, no_upload=False, post_to_github=False, time_log_dir=None): if verbose: print("Importing Stroop data from file %s:%s" % ( stroop_eid, stroop_file )) # Download Stroop file from XNAT into temporary directory experiment = xnat.select.experiments[stroop_eid] tempdir = tempfile.mkdtemp() try: stroop_file_path = os.path.join( tempdir, stroop_file ) stroop_dir_path = os.path.dirname(stroop_file_path) if not os.path.isdir(stroop_dir_path): os.makedirs(stroop_dir_path) experiment.resources[stroop_resource].files[stroop_file].download( stroop_file_path, verbose=False ) except IOError as e: details = "Error: import_mr_sessions_stroop: unable to get copy resource {0} file {1} to {2}".format(stroop_resource, stroop_file, stroop_file_path) slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), details, error_obj={ 'message': str(e), 'errno': e.errno, 'filename': e.filename, 'strerror': e.strerror }) return # Convert downloaded Stroop file to CSV scores file cmd = str(os.path.join(import_bindir, "stroop2csv")) + f' --mr-session --record "{redcap_key[0]}" --event "{redcap_key[1]}" "{str(stroop_file_path)}" "{str(tempdir)}"' (ecode,sout, serr) = sutils.call_shell_program(cmd) if ecode: slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), "Error: import_stroop_to_redcap: failed to run stroop2csv!", cmd = str(cmd), stderr = str(serr), stdout = str(sout)) added_files = sout if len( added_files ): if not no_upload: # Upload CSV file(s) (should only be one anyway) for file in added_files.decode('utf-8').split( '\n' ): if re.match( '.*\.csv$', 
file ): if verbose: print("Uploading ePrime Stroop scores",file) cmd = str(os.path.join( bindir, 'csv2redcap' )) if post_to_github: cmd += " -p" if time_log_dir: cmd += " -t " + str(time_log_dir) cmd += " " + str(file) (ecode,sout, serr) = sutils.call_shell_program(cmd) if ecode: slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), "Error: import_stroop_to_redcap: failed to run csv2redcap!", cmd = str(cmd), stderr = str(serr), stdout = str(sout)) # Upload original ePrime file for future reference cmd = str(os.path.join( import_bindir, "eprime2redcap" )) if post_to_github: cmd += " -p" cmd += f' --project data_entry --record {redcap_key[0]} --event {redcap_key[1]} "{str(stroop_file_path)}" mri_stroop_log_file' if verbose: print("Uploading ePrime Stroop file",stroop_file_path) # print " ".join(cmd_array) (ecode,sout, serr) = sutils.call_shell_program(cmd) if ecode: slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), "Error: import_stroop_to_redcap: failed to run eprime2redcap!", cmd = str(cmd), stderr = str(serr), stdout = str(sout)) else: error = "ERROR: could not convert Stroop file %s:%s" % ( redcap_key[0], stroop_file ) slog.info(str(redcap_key[0]) + '-' + str(redcap_key[1]), error, stroop_file = stroop_file) shutil.rmtree( tempdir )<|fim▁end|>
#
<|file_name|>NodeIteratorTest.java<|end_file_name|><|fim▁begin|>package org.chasen.mecab.wrapper; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.junit.Test; public class NodeIteratorTest { @Test public void threads() throws InterruptedException { List<Thread> threads = new ArrayList<Thread>(); threads.add(new Thread(){ public void run(){ Tagger t = Tagger.create("-r /opt/local/etc/mecabrc"); for(MecabNode<Node, Path> node: t.iterator("本日は晴天なり")){ System.out.println(node.getSurface()); }<|fim▁hole|> Tagger t = Tagger.create("-r /opt/local/etc/mecabrc"); for(MecabNode<Node, Path> node: t.iterator("本日は雨です")){ System.out.println(node.getSurface()); } } }); threads.add(new Thread(){ public void run(){ Tagger t = Tagger.create("-r /opt/local/etc/mecabrc"); for(MecabNode<Node, Path> node: t.iterator("昨日は曇りでした")){ System.out.println(node.getSurface()); } } }); for(Thread th: threads){ th.start(); } for(Thread th: threads){ th.join(); } } @Test public void executors() throws InterruptedException, ExecutionException { class Hoge { public void parse(String str){ Tagger t = Tagger.create("-r /opt/local/etc/mecabrc"); for(MecabNode<Node, Path> node: t.iterator(str)){ System.out.println(node.getSurface()); } } } final Hoge hoge = new Hoge(); ExecutorService executors = Executors.newCachedThreadPool(); List<Future<?>> futures = new ArrayList<Future<?>>(); futures.add(executors.submit(new Callable<Void>(){ public Void call() throws Exception { hoge.parse("本日は晴天なり"); return null; } })); futures.add(executors.submit(new Callable<Void>(){ public Void call() throws Exception { hoge.parse("本日は雨です"); return null; } })); futures.add(executors.submit(new Callable<Void>(){ public Void call() throws Exception { hoge.parse("昨日は曇りでした"); return null; } })); for(Future<?> f: futures){ f.get(); } } @Test public void executors_runnable() throws InterruptedException, ExecutionException { class Hoge implements Runnable { String str; Hoge(String str){ this.str = str; } public void run(){ Tagger t = Tagger.create("-r /opt/local/etc/mecabrc"); for(MecabNode<Node, Path> node: t.iterator(str)){ System.out.println(node.getSurface()); } } } ExecutorService executors = Executors.newCachedThreadPool(); List<Future<?>> futures = new ArrayList<Future<?>>(); futures.add(executors.submit(new Hoge("本日は晴天なり"))); futures.add(executors.submit(new Hoge("本日は雨です"))); futures.add(executors.submit(new Hoge("昨日は曇りでした"))); for(Future<?> f: futures){ f.get(); } } }<|fim▁end|>
            }
        });
        threads.add(new Thread(){
            public void run(){
<|file_name|>mod.py<|end_file_name|><|fim▁begin|># Copyright (C) 2008-2010 Adam Olsen # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # # The developers of the Exaile media player hereby grant permission # for non-GPL compatible GStreamer and Exaile plugins to be used and # distributed together with GStreamer and Exaile. This permission is # above and beyond the permissions granted by the GPL license by which # Exaile is covered. If you modify this code, you may extend this # exception to your version of the code, but you are not obligated to # do so. If you do not wish to do so, delete this exception statement # from your version. from xl.metadata._base import BaseFormat import os try: import ctypes modplug = ctypes.cdll.LoadLibrary("libmodplug.so.0") modplug.ModPlug_Load.restype = ctypes.c_void_p modplug.ModPlug_Load.argtypes = (ctypes.c_void_p, ctypes.c_int) modplug.ModPlug_GetName.restype = ctypes.c_char_p modplug.ModPlug_GetName.argtypes = (ctypes.c_void_p,) modplug.ModPlug_GetLength.restype = ctypes.c_int modplug.ModPlug_GetLength.argtypes = (ctypes.c_void_p,) except (ImportError, OSError): modplug = None class ModFormat(BaseFormat): writable = False def load(self): if modplug: data = open(self.loc, "rb").read() f = modplug.ModPlug_Load(data, len(data)) if f: name = modplug.ModPlug_GetName(f) or os.path.split(self.loc)[-1] length = modplug.ModPlug_GetLength(f) / 1000.0 or -1 self.mutagen = {'title': name, '__length': length} else: self.mutagen = {} def get_length(self): try: return self.mutagen['__length'] except KeyError: return -1 def get_bitrate(self): return -1<|fim▁hole|> # vim: et sts=4 sw=4<|fim▁end|>
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import unittest from pyramid import testing <|fim▁hole|> self.config = testing.setUp() def tearDown(self): testing.tearDown() def test_my_view(self): from .views import my_view request = testing.DummyRequest() info = my_view(request) self.assertEqual(info['project'], 'zodiacbauth')<|fim▁end|>
class ViewTests(unittest.TestCase):
    def setUp(self):
<|file_name|>test_protocol_peer.py<|end_file_name|><|fim▁begin|>############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### from __future__ import absolute_import import os # we need to select a txaio subsystem because we're importing the base # protocol classes here for testing purposes. "normally" you'd import # from autobahn.twisted.wamp or autobahn.asyncio.wamp explicitly. import txaio if os.environ.get('USE_TWISTED', False): txaio.use_twisted() else: txaio.use_asyncio() from autobahn import wamp from autobahn.wamp import message from autobahn.wamp import exception from autobahn.wamp import protocol import unittest2 as unittest class TestPeerExceptions(unittest.TestCase): def test_exception_from_message(self): session = protocol.BaseSession() @wamp.error(u"com.myapp.error1") class AppError1(Exception): pass @wamp.error(u"com.myapp.error2") class AppError2(Exception): pass session.define(AppError1) session.define(AppError2) # map defined errors to user exceptions emsg = message.Error(message.Call.MESSAGE_TYPE, 123456, u'com.myapp.error1') exc = session._exception_from_message(emsg) self.assertIsInstance(exc, AppError1) self.assertEqual(exc.args, ()) emsg = message.Error(message.Call.MESSAGE_TYPE, 123456, u'com.myapp.error2') exc = session._exception_from_message(emsg) self.assertIsInstance(exc, AppError2) self.assertEqual(exc.args, ()) # map undefined error to (generic) exception emsg = message.Error(message.Call.MESSAGE_TYPE, 123456, u'com.myapp.error3') exc = session._exception_from_message(emsg) self.assertIsInstance(exc, exception.ApplicationError) self.assertEqual(exc.error, u'com.myapp.error3') self.assertEqual(exc.args, ()) self.assertEqual(exc.kwargs, {}) emsg = message.Error(message.Call.MESSAGE_TYPE, 123456, u'com.myapp.error3', args=[1, 2, u'hello']) exc = session._exception_from_message(emsg) self.assertIsInstance(exc, exception.ApplicationError) self.assertEqual(exc.error, u'com.myapp.error3') self.assertEqual(exc.args, (1, 2, u'hello')) self.assertEqual(exc.kwargs, {}) emsg = message.Error(message.Call.MESSAGE_TYPE, 123456, u'com.myapp.error3', args=[1, 2, u'hello'], kwargs={u'foo': 23, u'bar': u'baz'}) exc = session._exception_from_message(emsg) self.assertIsInstance(exc, exception.ApplicationError) self.assertEqual(exc.error, u'com.myapp.error3') 
self.assertEqual(exc.args, (1, 2, u'hello')) self.assertEqual(exc.kwargs, {u'foo': 23, u'bar': u'baz'})<|fim▁hole|> def test_message_from_exception(self): session = protocol.BaseSession() @wamp.error(u"com.myapp.error1") class AppError1(Exception): pass @wamp.error(u"com.myapp.error2") class AppError2(Exception): pass session.define(AppError1) session.define(AppError2) exc = AppError1() msg = session._message_from_exception(message.Call.MESSAGE_TYPE, 123456, exc) self.assertEqual(msg.marshal(), [message.Error.MESSAGE_TYPE, message.Call.MESSAGE_TYPE, 123456, {}, "com.myapp.error1"])<|fim▁end|>
<|file_name|>app.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="MonitoredSocket.d.ts" /><|fim▁hole|>/// <reference path="Sqlite.d.ts" /><|fim▁end|>
<|file_name|>CreatureAIRegistry.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2008-2013 TrinityCore <http://www.trinitycore.org/> * Copyright (C) 2005-2009 MaNGOS <http://getmangos.com/> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details.<|fim▁hole|> */ #include "stdafx.hpp" #include "PassiveAI.h" #include "ReactorAI.h" #include "CombatAI.h" #include "GuardAI.h" #include "PetAI.h" #include "TotemAI.h" #include "CreatureEventAI.h" #include "RandomMovementGenerator.h" #include "MovementGeneratorImpl.h" #include "CreatureAIRegistry.h" #include "WaypointMovementGenerator.h" #include "CreatureAIFactory.h" #include "SmartAI.h" namespace AIRegistry { void Initialize() { (new CreatureAIFactory<NullCreatureAI>("NullCreatureAI"))->RegisterSelf(); (new CreatureAIFactory<TriggerAI>("TriggerAI"))->RegisterSelf(); (new CreatureAIFactory<AggressorAI>("AggressorAI"))->RegisterSelf(); (new CreatureAIFactory<ReactorAI>("ReactorAI"))->RegisterSelf(); (new CreatureAIFactory<PassiveAI>("PassiveAI"))->RegisterSelf(); (new CreatureAIFactory<CritterAI>("CritterAI"))->RegisterSelf(); (new CreatureAIFactory<GuardAI>("GuardAI"))->RegisterSelf(); (new CreatureAIFactory<PetAI>("PetAI"))->RegisterSelf(); (new CreatureAIFactory<TotemAI>("TotemAI"))->RegisterSelf(); (new CreatureAIFactory<CombatAI>("CombatAI"))->RegisterSelf(); (new CreatureAIFactory<ArcherAI>("ArcherAI"))->RegisterSelf(); (new CreatureAIFactory<TurretAI>("TurretAI"))->RegisterSelf(); (new CreatureAIFactory<CreatureEventAI>("EventAI"))->RegisterSelf(); (new CreatureAIFactory<VehicleAI>("VehicleAI"))->RegisterSelf(); (new CreatureAIFactory<SmartAI>("SmartAI"))->RegisterSelf(); (new GameObjectAIFactory<GameObjectAI>("GameObjectAI"))->RegisterSelf(); (new GameObjectAIFactory<SmartGameObjectAI>("SmartGameObjectAI"))->RegisterSelf(); (new MovementGeneratorFactory<RandomMovementGenerator<Creature> >(RANDOM_MOTION_TYPE))->RegisterSelf(); (new MovementGeneratorFactory<WaypointMovementGenerator<Creature> >(WAYPOINT_MOTION_TYPE))->RegisterSelf(); } }<|fim▁end|>
 *
 * You should have received a copy of the GNU General Public License along
 * with this program. If not, see <http://www.gnu.org/licenses/>.
<|file_name|>UserInterface.py<|end_file_name|><|fim▁begin|># PiTimer - Python Hardware Programming Education Project For Raspberry Pi # Copyright (C) 2015 Jason Birch # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #/****************************************************************************/ #/* PiTimer - Step 8 - Controlling physical relays. */ #/* ------------------------------------------------------------------------ */ #/* V1.00 - 2015-07-04 - Jason Birch */ #/* ------------------------------------------------------------------------ */ #/* Class to handle user input, output display and interface state machine. */ #/****************************************************************************/ import string import operator import datetime import SystemTime import Schedule import ScheduleItem # Constants to define current user interface display. STATE_MAIN_MENU = 0 STATE_ADD_SCHEDULE = 1 STATE_DEL_SCHEDULE = 2 STATE_RELAY_STATES = 3 STATE_SCHEDULE = 4 STATE_SET_SYSTEM_TIME = 5<|fim▁hole|>MODE_CONFIRM = 1 class UserInterface: def __init__(self, NewWIndow, NewThisSchedule, NewThisRelays): # Store a reference to the system window class to display onto. self.ThisWindow = NewWIndow # Store a reference to the schedule class to display schedule inforamtion. self.ThisSchedule = NewThisSchedule # Store a reference to the relays class to display relay inforamtion. self.ThisRelays = NewThisRelays # Create an instance of the system time class, to display the system time. self.ThisSystemTime = SystemTime.SystemTime() # Display application splash screen on initialisation. self.DisplaySplash() # Buffer for input strings. self.InputBuffer = "" # List position, moved by user. self.SelectPos = 0 self.SelectID = 0 # Display the initial user interface, the main menu. self.InterfaceState = STATE_MAIN_MENU #/***************************************************/ #/* Display a splash screen for application startup */ #/* to show information about this application. */ #/***************************************************/ def DisplaySplash(self): self.ThisWindow.clear() self.ThisWindow.refresh() print("{:^20}".format("PiTimer") + "\r") print("{:^20}".format("2015-06-23") + "\r") print("{:^20}".format("Version 1.00") + "\r") print("{:^20}".format("(C) Jason Birch") + "\r") self.ThisWindow.refresh() #/***********************************************************************/ #/* Distribute key press events to the current user interface function. 
*/ #/***********************************************************************/ def KeyPress(self, KeyCode): Result = KeyCode if self.InterfaceState == STATE_MAIN_MENU: Result = self.KeysMainMenu(KeyCode) elif self.InterfaceState == STATE_ADD_SCHEDULE: Result = self.KeysAddSchedule(KeyCode) elif self.InterfaceState == STATE_DEL_SCHEDULE: Result = self.KeysDelSchedule(KeyCode) elif self.InterfaceState == STATE_SCHEDULE: Result = self.KeysSchedule(KeyCode) elif self.InterfaceState == STATE_RELAY_STATES: Result = self.KeysRelayStates(KeyCode) elif self.InterfaceState == STATE_SET_SYSTEM_TIME: Result = self.KeysSetSystemTime(KeyCode) return Result #/****************************************************************/ #/* Certain user interface displays need to update every second. */ #/****************************************************************/ def DisplayRefresh(self): if self.InterfaceState == STATE_MAIN_MENU: self.DisplayMainMenu() elif self.InterfaceState == STATE_ADD_SCHEDULE: self.DisplayAddSchedule() elif self.InterfaceState == STATE_DEL_SCHEDULE: self.DisplayDelSchedule() elif self.InterfaceState == STATE_SCHEDULE: self.DisplaySchedule() elif self.InterfaceState == STATE_RELAY_STATES: self.DisplayRelayStates() elif self.InterfaceState == STATE_SET_SYSTEM_TIME: self.DisplaySetSystemTime() #/*******************************************************/ #/* Change the current user interface to a new display. */ #/*******************************************************/ def SetInterfaceState(self, NewInterfaceState): # Start on standard display mode. self.Mode = MODE_STANDARD # Clear the input buffer. self.InputBuffer = "" # Reset list selection position. self.SelectPos =0 self.SelectID = 0 self.InterfaceState = NewInterfaceState if self.InterfaceState == STATE_MAIN_MENU: self.DisplayMainMenu() elif self.InterfaceState == STATE_ADD_SCHEDULE: self.DisplayAddSchedule() elif self.InterfaceState == STATE_DEL_SCHEDULE: self.DisplayDelSchedule() elif self.InterfaceState == STATE_SCHEDULE: self.DisplaySchedule() elif self.InterfaceState == STATE_RELAY_STATES: self.DisplayRelayStates() elif self.InterfaceState == STATE_SET_SYSTEM_TIME: self.DisplaySetSystemTime() #/*********************************************************/ #/* Provided the input from the user and a mask to define */ #/* how to display the input, format a string to display. */ #/*********************************************************/ def GetMaskedInput(self, Mask, Input): InputCount = 0 Result = "" for Char in Mask: if Char == "#" and len(Input) > InputCount: Result += Input[InputCount:InputCount + 1] InputCount += 1 else: Result += Char return Result #/************************************************/ #/* Gather the input required for an input mask. */ #/************************************************/ def KeyMaskedInput(self, Mask, Input, KeyCode): # If a valid key is pressed, add to the input buffer. if len(Input) < Mask.count("#") and KeyCode >= ord("0") and KeyCode <= ord("9"): Input += chr(KeyCode) # If delete key is pressed, delete the last entered key. elif KeyCode == 127 and len(Input) > 0: Input = Input[:-1] return Input #/*****************************/ #/* MAIN MENU user interface. 
*/ #/*****************************/ def DisplayMainMenu(self): self.ThisWindow.clear() self.ThisWindow.refresh() print("{:>20}".format(self.ThisSystemTime.SystemTimeString()) + "\r") print("{:^20}".format("1 Add 4 Schedule") + "\r") print("{:^20}".format("2 Delete 5 Set Time") + "\r") print("{:^20}".format("3 Relays 6 Shutdown") + "\r") self.ThisWindow.refresh() def KeysMainMenu(self, KeyCode): Result = KeyCode # If menu item 1 is selected, change to display add schedule. if KeyCode == ord("1"): self.SetInterfaceState(STATE_ADD_SCHEDULE) # If menu item 2 is selected, change to display del schedule. if KeyCode == ord("2"): self.SetInterfaceState(STATE_DEL_SCHEDULE) # If menu item 3 is selected, change to display relay states. if KeyCode == ord("3"): self.SetInterfaceState(STATE_RELAY_STATES) # If menu item 4 is selected, change to display schedule. if KeyCode == ord("4"): self.SetInterfaceState(STATE_SCHEDULE) # If menu item 5 is selected, change to display set system time. if KeyCode == ord("5"): self.SetInterfaceState(STATE_SET_SYSTEM_TIME) # If menu item 6 is selected, return ESC key to the application main loop. if KeyCode == ord("6"): Result = 27 return Result #/********************************/ #/* RELAY STATES user interface. */ #/********************************/ def DisplayRelayStates(self): self.ThisWindow.clear() self.ThisWindow.refresh() self.ThisRelays.DisplayRelayStates() self.ThisWindow.refresh() def KeysRelayStates(self, KeyCode): Result = KeyCode # If enter key is pressed, change to display main menu. if KeyCode == 10: self.SetInterfaceState(STATE_MAIN_MENU) return Result #/********************************/ #/* ADD SCHEDULE user interface. */ #/********************************/ def DisplayAddSchedule(self): self.ThisWindow.clear() self.ThisWindow.refresh() print("{:^20}".format("ADD SCHEDULE") + "\r") print(self.GetMaskedInput("####-##-## ##:##:##\r\nPeriod ### ##:##:##\r\nRelay ## State #\r", self.InputBuffer)) self.ThisWindow.refresh() def KeysAddSchedule(self, KeyCode): Result = KeyCode self.InputBuffer = self.KeyMaskedInput("####-##-## ##:##:## ### ##:##:## ## #", self.InputBuffer, KeyCode) # If enter key is pressed, change to display main menu. if KeyCode == 10: # If full user input has been gathered, add a schedule item. if len(self.InputBuffer) == 26: # Parse user input. UserInput = self.GetMaskedInput("####-##-## ##:##:## ### ##:##:## ## #", self.InputBuffer) RelayState = { "0":ScheduleItem.RELAY_OFF, "1":ScheduleItem.RELAY_ON, "2":ScheduleItem.RELAY_TOGGLE, }.get(UserInput[36:37], ScheduleItem.RELAY_TOGGLE) PeriodSeconds = string.atoi(UserInput[30:32]) + 60 * string.atoi(UserInput[27:29]) + 60 * 60 * string.atoi(UserInput[24:26]) + 24 * 60 * 60 * string.atoi(UserInput[20:23]) PeriodDays = operator.div(PeriodSeconds, 24 * 60 * 60) PeriodSeconds = operator.mod(PeriodSeconds, 24 * 60 * 60) # Add schedule item, ignore errors from invalid data entered. try: self.ThisSchedule.AddSchedule(string.atoi(UserInput[33:35]), datetime.datetime(string.atoi(UserInput[0:4]), string.atoi(UserInput[5:7]), string.atoi(UserInput[8:10]), string.atoi(UserInput[11:13]), string.atoi(UserInput[14:16]), string.atoi(UserInput[17:19])), RelayState, datetime.timedelta(PeriodDays, PeriodSeconds)) except: print("") self.ThisWindow.refresh() self.SetInterfaceState(STATE_MAIN_MENU) return Result #/********************************/ #/* DEL SCHEDULE user interface. 
*/ #/********************************/ def DisplayDelSchedule(self): self.ThisWindow.clear() self.ThisWindow.refresh() if self.Mode == MODE_STANDARD: print("{:^20}".format("DELETE SCHEDULE") + "\r") print("\r") if self.ThisSchedule.GetItemCount(): self.SelectID = self.ThisSchedule.DisplaySchedule(self.SelectPos, 1) else: print("{:^20}".format("Empty") + "\r") elif self.Mode == MODE_CONFIRM: print("{:^20}".format("DELETE SCHEDULE") + "\r") print("\r") print("{:^20}".format("ARE YOU SURE?") + "\r") print("{:^20}".format("(4=N, 6=Y)") + "\r") self.ThisWindow.refresh() def KeysDelSchedule(self, KeyCode): Result = KeyCode if self.Mode == MODE_STANDARD: # If a key at the top of the keypad is pressed, move up the list. if (KeyCode == ord("1") or KeyCode == ord("2") or KeyCode == ord("3")) and self.SelectPos > 0: self.SelectPos -= 1 # If a key at the bottom of the keypad is pressed, move down the list. elif (KeyCode == ord("0") or KeyCode == ord("7") or KeyCode == ord("8") or KeyCode == ord("9")) and self.SelectPos < self.ThisSchedule.GetItemCount() - 1: self.SelectPos += 1 # If enter key is pressed, enter confirm mode. if KeyCode == 10: if self.ThisSchedule.GetItemCount(): self.Mode = MODE_CONFIRM else: self.SetInterfaceState(STATE_MAIN_MENU) # If delete key is pressed, change to display main menu. if KeyCode == 127: self.SetInterfaceState(STATE_MAIN_MENU) elif self.Mode == MODE_CONFIRM: if KeyCode == ord("4"): self.SetInterfaceState(STATE_MAIN_MENU) elif KeyCode == ord("6"): self.ThisSchedule.DelSchedule(self.SelectID) self.SetInterfaceState(STATE_MAIN_MENU) return Result #/************************************/ #/* CURRENT SCHEDULE user interface. */ #/************************************/ def DisplaySchedule(self): self.ThisWindow.clear() self.ThisWindow.refresh() if self.ThisSchedule.GetItemCount(): self.ThisSchedule.DisplaySchedule(self.SelectPos, 2) else: print("\r") print("{:^20}".format("Empty") + "\r") self.ThisWindow.refresh() def KeysSchedule(self, KeyCode): Result = KeyCode # If a key at the top of the keypad is pressed, move up the list. if (KeyCode == ord("1") or KeyCode == ord("2") or KeyCode == ord("3")) and self.SelectPos > 0: self.SelectPos -= 1 # If a key at the bottom of the keypad is pressed, move down the list. elif (KeyCode == ord("0") or KeyCode == ord("7") or KeyCode == ord("8") or KeyCode == ord("9")) and self.SelectPos < self.ThisSchedule.GetItemCount() - 1: self.SelectPos += 1 # If enter key is pressed, change to display main menu. elif KeyCode == 10: self.SetInterfaceState(STATE_MAIN_MENU) return Result #/***********************************/ #/* SET SYSTEM TIME user interface. */ #/***********************************/ def DisplaySetSystemTime(self): self.ThisWindow.clear() self.ThisWindow.refresh() print("{:^20}".format("SET SYSTEM TIME") + "\r") print(self.GetMaskedInput("####-##-## ##:##:##\r", self.InputBuffer)) self.ThisWindow.refresh() def KeysSetSystemTime(self, KeyCode): Result = KeyCode self.InputBuffer = self.KeyMaskedInput("####-##-## ##:##:##", self.InputBuffer, KeyCode) # If enter key is pressed, change to display main menu. if KeyCode == 10: # If full user input has been gathered, set the system time. if len(self.InputBuffer) == 14: # BOOKMARK: THIS IS A PLACEHOLDER FOR WHEN THE CLOCK MODLUE IS IMPLEMENTED. self.ThisSystemTime.SetSystemTime(self.GetMaskedInput("####-##-## ##:##:##", self.InputBuffer)) self.SetInterfaceState(STATE_MAIN_MENU) return Result<|fim▁end|>
STATE_SHUTDOWN = 6

# Constants to define display modes.
MODE_STANDARD = 0
<|file_name|>waterfallsink_gl.py<|end_file_name|><|fim▁begin|># # Copyright 2008,2009 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. #<|fim▁hole|># GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # ################################################## # Imports ################################################## import waterfall_window import common from gnuradio import gr, blks2 from pubsub import pubsub from constants import * ################################################## # Waterfall sink block (wrapper for old wxgui) ################################################## class _waterfall_sink_base(gr.hier_block2, common.wxgui_hb): """ An fft block with real/complex inputs and a gui window. """ def __init__( self, parent, baseband_freq=0, ref_level=50, sample_rate=1, fft_size=512, fft_rate=waterfall_window.DEFAULT_FRAME_RATE, average=False, avg_alpha=None, title='', size=waterfall_window.DEFAULT_WIN_SIZE, ref_scale=2.0, dynamic_range=80, num_lines=256, win=None, **kwargs #do not end with a comma ): #ensure avg alpha if avg_alpha is None: avg_alpha = 2.0/fft_rate #init gr.hier_block2.__init__( self, "waterfall_sink", gr.io_signature(1, 1, self._item_size), gr.io_signature(0, 0, 0), ) #blocks fft = self._fft_chain( sample_rate=sample_rate, fft_size=fft_size, frame_rate=fft_rate, ref_scale=ref_scale, avg_alpha=avg_alpha, average=average, win=win, ) msgq = gr.msg_queue(2) sink = gr.message_sink(gr.sizeof_float*fft_size, msgq, True) #controller self.controller = pubsub() self.controller.subscribe(AVERAGE_KEY, fft.set_average) self.controller.publish(AVERAGE_KEY, fft.average) self.controller.subscribe(AVG_ALPHA_KEY, fft.set_avg_alpha) self.controller.publish(AVG_ALPHA_KEY, fft.avg_alpha) self.controller.subscribe(SAMPLE_RATE_KEY, fft.set_sample_rate) self.controller.publish(SAMPLE_RATE_KEY, fft.sample_rate) self.controller.subscribe(DECIMATION_KEY, fft.set_decimation) self.controller.publish(DECIMATION_KEY, fft.decimation) self.controller.subscribe(FRAME_RATE_KEY, fft.set_vec_rate) self.controller.publish(FRAME_RATE_KEY, fft.frame_rate) #start input watcher common.input_watcher(msgq, self.controller, MSG_KEY) #create window self.win = waterfall_window.waterfall_window( parent=parent, controller=self.controller, size=size, title=title, real=self._real, fft_size=fft_size, num_lines=num_lines, baseband_freq=baseband_freq, decimation_key=DECIMATION_KEY, sample_rate_key=SAMPLE_RATE_KEY, frame_rate_key=FRAME_RATE_KEY, dynamic_range=dynamic_range, ref_level=ref_level, average_key=AVERAGE_KEY, avg_alpha_key=AVG_ALPHA_KEY, msg_key=MSG_KEY, ) common.register_access_methods(self, self.win) setattr(self.win, 'set_baseband_freq', getattr(self, 'set_baseband_freq')) #BACKWARDS #connect self.wxgui_connect(self, fft, sink) class waterfall_sink_f(_waterfall_sink_base): _fft_chain = blks2.logpwrfft_f _item_size = gr.sizeof_float _real = True class 
waterfall_sink_c(_waterfall_sink_base): _fft_chain = blks2.logpwrfft_c _item_size = gr.sizeof_gr_complex _real = False # ---------------------------------------------------------------- # Standalone test app # ---------------------------------------------------------------- import wx from gnuradio.wxgui import stdgui2 class test_top_block (stdgui2.std_top_block): def __init__(self, frame, panel, vbox, argv): stdgui2.std_top_block.__init__ (self, frame, panel, vbox, argv) fft_size = 512 # build our flow graph input_rate = 20.000e3 # Generate a complex sinusoid self.src1 = gr.sig_source_c (input_rate, gr.GR_SIN_WAVE, 5.75e3, 1000) #src1 = gr.sig_source_c (input_rate, gr.GR_CONST_WAVE, 5.75e3, 1000) # We add these throttle blocks so that this demo doesn't # suck down all the CPU available. Normally you wouldn't use these. self.thr1 = gr.throttle(gr.sizeof_gr_complex, input_rate) sink1 = waterfall_sink_c (panel, title="Complex Data", fft_size=fft_size, sample_rate=input_rate, baseband_freq=100e3) self.connect(self.src1, self.thr1, sink1) vbox.Add (sink1.win, 1, wx.EXPAND) # generate a real sinusoid self.src2 = gr.sig_source_f (input_rate, gr.GR_SIN_WAVE, 5.75e3, 1000) self.thr2 = gr.throttle(gr.sizeof_float, input_rate) sink2 = waterfall_sink_f (panel, title="Real Data", fft_size=fft_size, sample_rate=input_rate, baseband_freq=100e3) self.connect(self.src2, self.thr2, sink2) vbox.Add (sink2.win, 1, wx.EXPAND) def main (): app = stdgui2.stdapp (test_top_block, "Waterfall Sink Test App") app.MainLoop () if __name__ == '__main__': main ()<|fim▁end|>
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>def load_config(default_values, user_values): if user_values is None:<|fim▁hole|> for k, v in user_values.items(): if k in default_values: if isinstance(v, dict): cloned = user_values[k].copy() for key, value in default_values[k].items(): if key is not None and key not in user_values[k] \ or user_values[k][key] == '': cloned[key] = value config[k] = cloned else: config[k] = v else: config[k] = v for k, v in default_values.items(): if k not in config: config[k] = v return config def import_class(full_path): path_split = full_path.split('.') path = ".".join(path_split[:-1]) klass = path_split[-1:] mod = __import__(path, fromlist=[klass]) return getattr(mod, klass[0])<|fim▁end|>
        return default_values

    config = {}
<|file_name|>instructions_a_test.go<|end_file_name|><|fim▁begin|>package chip8 import ( "testing" ) func Test0xAnnn(t *testing.T) {<|fim▁hole|> c := New([]byte{ 0xAF, 0xFF, }) c.Step() if c.i != 0xFFF { t.Error("i was not set to 0xFFF as expected") } }<|fim▁end|>
<|file_name|>FMTLexer.hpp<|end_file_name|><|fim▁begin|>#ifndef INC_FMTLexer_hpp_ #define INC_FMTLexer_hpp_ #include <antlr/config.hpp> /* $ANTLR 2.7.7 (20130428): "format.g" -> "FMTLexer.hpp"$ */ #include <antlr/CommonToken.hpp><|fim▁hole|>#include "FMTTokenTypes.hpp" #include <antlr/CharScanner.hpp> #include <fstream> #include <sstream> #include "fmtnode.hpp" #include "CFMTLexer.hpp" #include <antlr/TokenStreamSelector.hpp> //using namespace antlr; class CUSTOM_API FMTLexer : public antlr::CharScanner, public FMTTokenTypes { private: antlr::TokenStreamSelector* selector; CFMTLexer* cLexer; public: void SetSelector( antlr::TokenStreamSelector& s) { selector = &s; } void SetCLexer( CFMTLexer& l) { cLexer = &l; } private: void initLiterals(); public: bool getCaseSensitiveLiterals() const { return false; } public: FMTLexer(std::istream& in); FMTLexer(antlr::InputBuffer& ib); FMTLexer(const antlr::LexerSharedInputState& state); antlr::RefToken nextToken(); public: void mSTRING(bool _createToken); public: void mCSTRING(bool _createToken); public: void mLBRACE(bool _createToken); public: void mRBRACE(bool _createToken); public: void mSLASH(bool _createToken); public: void mCOMMA(bool _createToken); public: void mA(bool _createToken); public: void mTERM(bool _createToken); public: void mNONL(bool _createToken); public: void mF(bool _createToken); public: void mD(bool _createToken); public: void mE(bool _createToken); public: void mG(bool _createToken); public: void mI(bool _createToken); public: void mO(bool _createToken); public: void mB(bool _createToken); public: void mZ(bool _createToken); public: void mZZ(bool _createToken); public: void mQ(bool _createToken); public: void mH(bool _createToken); public: void mT(bool _createToken); public: void mL(bool _createToken); public: void mR(bool _createToken); public: void mX(bool _createToken); public: void mC(bool _createToken); public: void mCMOA(bool _createToken); public: void mCMoA(bool _createToken); public: void mCmoA(bool _createToken); public: void mCMOI(bool _createToken); public: void mCDI(bool _createToken); public: void mCMI(bool _createToken); public: void mCYI(bool _createToken); public: void mCSI(bool _createToken); public: void mCSF(bool _createToken); public: void mCHI(bool _createToken); public: void mChI(bool _createToken); public: void mCDWA(bool _createToken); public: void mCDwA(bool _createToken); public: void mCdwA(bool _createToken); public: void mCAPA(bool _createToken); public: void mCApA(bool _createToken); public: void mCapA(bool _createToken); public: void mPERCENT(bool _createToken); public: void mDOT(bool _createToken); public: void mPM(bool _createToken); public: void mMP(bool _createToken); protected: void mW(bool _createToken); public: void mWHITESPACE(bool _createToken); protected: void mDIGITS(bool _createToken); protected: void mCHAR(bool _createToken); public: void mNUMBER(bool _createToken); private: static const unsigned long _tokenSet_0_data_[]; static const antlr::BitSet _tokenSet_0; static const unsigned long _tokenSet_1_data_[]; static const antlr::BitSet _tokenSet_1; static const unsigned long _tokenSet_2_data_[]; static const antlr::BitSet _tokenSet_2; }; #endif /*INC_FMTLexer_hpp_*/<|fim▁end|>
#include <antlr/InputBuffer.hpp>
#include <antlr/BitSet.hpp>
<|file_name|>a-dashboard_controller.js<|end_file_name|><|fim▁begin|>'use strict'; /* global angular */ (function() { var aDashboard = angular.module('aDashboard'); aDashboard.controller('ADashboardController', function( $scope, $rootScope, tradelistFactory, $timeout) { $scope.subState = $scope.$parent; $scope.accountValue; $scope.avgWin; $scope.avgLoss; $scope.avgTradeSize; $scope.$on('tradeActionUpdated', function(event, args) { var tradelist = args.tradelist; calculateValue( tradelist ); $scope.avgWin = calculateAvgWin( tradelist ).avg; $scope.winCount = calculateAvgWin( tradelist ).count;<|fim▁hole|> var getTradelist = function() { tradelistFactory.getTradelist() .then(function(tradelist) { }); }; getTradelist(); function calculateValue( tradelist ){ var sum = 0; tradelist.forEach(function(entry) { if( entry.tradeValue ){ sum += Number(entry.tradeValue); } }); $scope.accountValue = sum; }; function calculateAvgWin( tradelist ){ var sum = 0; var count = 0; tradelist.forEach(function(entry) { if( entry.tradeValue > 0 ){ ++count; sum += Number(entry.tradeValue); } }); return {avg: (sum / count).toFixed(2), count: count}; }; function calculateAvgLoss( tradelist ){ var sum = 0; var count = 0; tradelist.forEach(function(entry) { if( entry.tradeValue < 0 ){ ++count sum += Number(entry.tradeValue); } }); console.log('sum: ', sum); return {avg: (sum / count).toFixed(2), count: count}; }; function calculateAvgTradeSize( tradelist ){ var actionCount = 0; var sum = 0; tradelist.forEach(function(entry) { var actions = entry.actions; actions.forEach(function(action) { if( action.price && action.quantity ){ ++actionCount; sum = sum + (Math.abs(action.price * action.quantity)); } }); }); if( actionCount == 0 ){ actionCount = 1; } $scope.avgTradeSize = (sum / actionCount).toFixed(2); }; }); })();<|fim▁end|>
        $scope.avgLoss = calculateAvgLoss( tradelist ).avg;
        $scope.lossCount = calculateAvgLoss( tradelist ).count;
        calculateAvgTradeSize( tradelist );
    });
<|file_name|>pt.js<|end_file_name|><|fim▁begin|><|fim▁hole|>*/ CKEDITOR.plugins.setLang("placeholder","pt",{title:"Propriedades dos marcadores",toolbar:"Símbolo",name:"Nome do marcador",invalidName:"O marcador não pode estar em branco e não pode conter qualquer dos seguintes carateres: [, ], <, >",pathName:"símbolo"});<|fim▁end|>
/*
Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
<|file_name|>client.rs<|end_file_name|><|fim▁begin|>use std; import core::result::*;<|fim▁hole|> assert count <= vec::len(bytes); ret str::from_bytes(vec::slice(bytes, 0u, count)); } fn main() { let host = "0.0.0.0"; let port = 12345u16; let sock = new_tcp_socket(); let addr = inet_addr(host); let sockaddr = new_sockaddr_in(af_inet, addr, port); let r = connect(sock, sockaddr); if failure(r) { fail #fmt("cannot connect to %s:%u", host, port as uint); } send_str(sock, "HELLO", 0u); let buf = vec::init_elt_mut(1024u, 0u8); let size = recv(sock, buf, 0u); let s = from_bytes_n(buf, size as uint); std::io::println(s); }<|fim▁end|>
import socket::*;

fn from_bytes_n(bytes: [const u8], count: uint) -> str {
<|file_name|>ActivityOverview.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'; import MePanel from '../components/panels/MePanel'; import { Segment } from 'semantic-ui-react' import DetailedActivity from './activities/DetailedActivity'; import ActivityGraph from './activities/ActivityGraph'; class ActivityOverview extends Component { constructor(props) { super(props); this.state = { selectedActivity: { activityId: "N/A", averageHeartRate: 0, date: null, description: "N/A", distance: 0, maxHeartRate: 0, pace: 0, time: 0, type: "N/A", location: null } }; } renderActivity(activityId) { let activity = this.props.activities.filter(activity => activity.activityId === activityId); this.setState({ selectedActivity: activity.length > 0 ? activity[0] : null }); } render() { return ( <Segment.Group horizontal> <Segment compact basic className="rt-statistics-segment"><|fim▁hole|> loading={this.props.loading} chartData={this.props.chartData} activities={this.props.activities} fromDate={this.props.fromDate} toDate={this.props.toDate} pointOnClickHandler={this.renderActivity.bind(this)} /> <DetailedActivity loading={this.props.loading} activity={this.state.selectedActivity} /> </Segment> </Segment.Group> ); } } export default ActivityOverview;<|fim▁end|>
                    <MePanel me={this.props.me} loading={this.props.loading} />
                </Segment>
                <Segment padded loading={this.props.loading} basic>
                    <ActivityGraph
<|file_name|>extended_encoder.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright 2014 Free Software Foundation, Inc. # # This file is part of GNU Radio # # SPDX-License-Identifier: GPL-3.0-or-later # # from __future__ import absolute_import from __future__ import unicode_literals from gnuradio import gr, blocks from . import fec_swig as fec from .threaded_encoder import threaded_encoder from .capillary_threaded_encoder import capillary_threaded_encoder from .bitflip import read_bitlist class extended_encoder(gr.hier_block2): def __init__(self, encoder_obj_list, threading, puncpat=None): gr.hier_block2.__init__(self, "extended_encoder", gr.io_signature(1, 1, gr.sizeof_char), gr.io_signature(1, 1, gr.sizeof_char)) self.blocks=[] self.puncpat=puncpat if(type(encoder_obj_list) == list): if(type(encoder_obj_list[0]) == list): gr.log.info("fec.extended_encoder: Parallelism must be 1.") raise AttributeError else: # If it has parallelism of 0, force it into a list of 1 encoder_obj_list = [encoder_obj_list,] if fec.get_encoder_input_conversion(encoder_obj_list[0]) == "pack": self.blocks.append(blocks.pack_k_bits_bb(8)) if threading == 'capillary': self.blocks.append(capillary_threaded_encoder(encoder_obj_list, gr.sizeof_char, gr.sizeof_char)) elif threading == 'ordinary': self.blocks.append(threaded_encoder(encoder_obj_list, gr.sizeof_char, gr.sizeof_char)) else: self.blocks.append(fec.encoder(encoder_obj_list[0], gr.sizeof_char, gr.sizeof_char)) if fec.get_encoder_output_conversion(encoder_obj_list[0]) == "packed_bits": self.blocks.append(blocks.packed_to_unpacked_bb(1, gr.GR_MSB_FIRST)) if self.puncpat != '11': self.blocks.append(fec.puncture_bb(len(puncpat), read_bitlist(puncpat), 0))<|fim▁hole|> # Connect the input to the encoder and the output to the # puncture if used or the encoder if not. self.connect((self, 0), (self.blocks[0], 0)); self.connect((self.blocks[-1], 0), (self, 0)); # If using the puncture block, add it into the flowgraph after # the encoder. for i in range(len(self.blocks) - 1): self.connect((self.blocks[i], 0), (self.blocks[i+1], 0));<|fim▁end|>