code
stringlengths 3
1.05M
| repo_name
stringlengths 4
116
| path
stringlengths 4
991
| language
stringclasses 9
values | license
stringclasses 15
values | size
int32 3
1.05M
|
---|---|---|---|---|---|
# Controller for creating, listing and displaying trips.
class TripsController < ApplicationController
  before_action :set_trip, only: %i[show update destroy edit]

  # Signed-in users see the trips they created and the trips they joined;
  # guests see the next 25 trips ordered by start time.
  def index
    if current_user
      @created_trips = current_user.created_trips
      @joined_trips = current_user.trips
    else
      @trips = Trip.order(:start_at).limit(25)
    end
  end

  def new
    @trip = Trip.new
  end

  # Builds a trip owned by the current user; falls back to the root path
  # when validation fails.
  def create
    @trip = Trip.new(trip_params)
    @trip.user_id = current_user.id
    return redirect_to(@trip) if @trip.save

    redirect_to '/'
  end

  # Renders the trip page; the JSON variant returns the coordinate pairs
  # ([destination, origin], each [lat, lng]) consumed by Mapbox.
  def show
    @coord = [[@trip.destination.lat, @trip.destination.lng],
              [@trip.origin.lat, @trip.origin.lng]]
    respond_to do |format|
      format.html
      format.json { render json: @coord }
    end
  end

  private

  # Looks up the trip referenced by the route for member actions.
  def set_trip
    @trip = Trip.find(params[:id])
  end

  # Whitelists trip attributes accepted from the request.
  def trip_params
    params.require(:trip).permit(:origin_id, :destination_id, :start_at, :end_at)
  end
end
| willot/raft-new | app/controllers/trips_controller.rb | Ruby | mit | 952 |
module RedditKit
  class Client
    # Methods for searching reddit's links.
    module Search
      # Search for links.
      #
      # @param query [String] The search query.
      # @option options [String, RedditKit::Subreddit] subreddit The optional subreddit to search.
      # @option options [true, false] restrict_to_subreddit Whether to search only in a specified subreddit.
      # @option options [1..100] limit The number of links to return.
      # @option options [String] count The number of results to return before or after. This is different from `limit`.
      # @option options [relevance, new, hot, top, comments] sort The sorting order for search results.
      # @option options [String] before Only return links before this full name.
      # @option options [String] after Only return links after this full name.
      # @option options [cloudsearch, lucene, plain] syntax Specify the syntax for the search. Learn more: http://www.reddit.com/r/redditdev/comments/1hpicu/whats_this_syntaxcloudsearch_do/cawm0fe
      # @option options [hour, day, week, month, year, all] time Show results with a specific time period.
      # @return [RedditKit::PaginatedResponse]
      def search(query, options = {})
        subreddit = options[:subreddit]
        # When no subreddit is given the prefix is nil, which "%s" renders
        # as an empty string, yielding the site-wide "/search.json" path.
        path = format('%s/search.json', ('r/' + subreddit if subreddit))

        parameters = {
          :q           => query,
          :restrict_sr => options[:restrict_to_subreddit],
          :limit       => options[:limit],
          :count       => options[:count],
          :sort        => options[:sort],
          :before      => options[:before],
          :after       => options[:after],
          :syntax      => options[:syntax],
          :t           => options[:time]
        }

        objects_from_response(:get, path, parameters)
      end
    end
  end
end
| samsymons/RedditKit.rb | lib/redditkit/client/search.rb | Ruby | mit | 1,908 |
//APP
// Root module: depends on ngRoute for client-side routing and the slick
// carousel directive wrapper.
var app = angular.module('PortfolioApp', ['ngRoute', 'slick']);
//ROUTING
// Two routes: home page and a per-project detail view; anything else
// falls back to home.
app.config(function ($routeProvider) {
"ngInject";
$routeProvider
.when('/', {
controller: "HomeController",
templateUrl: "js/angular/views/home-view.html"
})
.when('/work/:projectId', {
// :projectId is read by ProjectController to load projects/<id>.json
controller: 'ProjectController',
templateUrl: 'js/angular/views/project-view.html'
})
.otherwise({
redirectTo: '/'
});
});
//CONTROLLERS
// Home page controller: loads the project list and wires up jQuery
// navigation handlers (mobile menu toggle and smooth-scroll anchors).
// NOTE(review): direct jQuery DOM manipulation inside a controller is
// non-idiomatic Angular; a directive would be the conventional home for it.
app.controller('HomeController', ['$scope', 'projects', function($scope, projects) {
"ngInject";
// projects is the $http promise from the 'projects' factory; .success
// receives the parsed JSON body directly.
projects.success(function(data) {
$scope.projects = data;
});
//init function for binding
function bindListeners() {
$("header").on("click", ".mobile-toggle", function() {
$(this).toggleClass("active");
})
$("header, .about").on("click", ".nav-link", function(e) {
e.preventDefault();
e.stopImmediatePropagation();
// Collapse the mobile menu after choosing a link on small screens.
if($(window).width() <= 740)
$(".mobile-toggle").removeClass("active");
var anchor = $(this).attr("href");
// Smooth-scroll to the section, offset by the 70px fixed header.
$('html, body').animate({
scrollTop: $(anchor).offset().top - 70
}, 500);
})
}
//Home page initializations
angular.element(document).ready(function () {
bindListeners();
});
}]);
// Project detail controller: loads projects/<projectId>.json for the
// routed project and exposes it as $scope.detail.
// Fix: the original declared $sce as a 4th function parameter without
// listing it in the DI annotation array, so $sce was always undefined
// (and would stay undefined under minification-safe injection). It was
// unused, so it is removed rather than added to the array.
app.controller('ProjectController', ['$scope', '$routeParams', '$http',
    function($scope, $routeParams, $http) {
        "ngInject";
        // Flag consumed by the view to toggle video embeds.
        $scope.video = false;
        $http.get('projects/' + $routeParams.projectId + '.json')
            .success(function(data) {
                $scope.detail = data;
            })
            .error(function(data) {
                console.log("Failed to get data")
            });
    }
]);
//SERVICES
// Factory returning the $http promise for the project list; controllers
// attach their own .success/.error handlers to it.
// Fix: the original error handler logged AFTER `return data`, so the
// console.log was unreachable; the log now runs before returning.
app.factory('projects', ['$http', function($http) {
    "ngInject";
    return $http.get('projects/project-list.json')
        .success(function(data) {
            return data;
        })
        .error(function(data) {
            console.log("Failed to get data")
            return data;
        });
}]);
//FILTERS
// 'safe' filter: marks a string as trusted HTML so ng-bind-html renders
// it without sanitization. Only use on content the app itself controls.
app.filter('safe', function($sce) {
    "ngInject";
    return function(input) {
        return $sce.trustAsHtml(input);
    };
});
| michael-eightnine/frontend-portfolio | site/js/angular/app.js | JavaScript | mit | 1,907 |
// Copyright (c) 2018 Louis Wu
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
using System;
using System.Threading;
namespace Unicorn
{
    /// <summary>
    /// Extension helpers for <see cref="System.Threading.CancellationTokenSource"/>.
    /// </summary>
    public static class CancellationTokenSourceExtensions
    {
        /// <summary>
        /// Cancels the source and then disposes it. Null-safe: a null
        /// receiver is a no-op. Exceptions expected during teardown are
        /// swallowed; note that if <c>Cancel()</c> throws, <c>Dispose()</c>
        /// is intentionally skipped (the source is in an unknown state).
        /// </summary>
        /// <param name="cancellationTokenSource">The source to cancel and dispose; may be null.</param>
        public static void CancelAndDispose(this CancellationTokenSource cancellationTokenSource)
        {
            if (cancellationTokenSource == null)
            {
                return;
            }
            try
            {
                cancellationTokenSource.Cancel();
                cancellationTokenSource.Dispose();
            }
            catch (ObjectDisposedException)
            {
                // Already disposed elsewhere; nothing left to clean up.
            }
            catch (AggregateException)
            {
                // Cancel() aggregates exceptions thrown by registered callbacks.
            }
        }
    }
}
| FatJohn/UnicornToolkit | Library/Unicorn.Shared/Extension/CancellationTokenSourceExtensions.cs | C# | mit | 1,739 |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="Creative One Page Parallax Template">
<meta name="keywords" content="Creative, Onepage, Parallax, HTML5, Bootstrap, Popular, custom, personal, portfolio" />
<meta name="author" content="">
<title>Sistem Monitoring KP - Sisfor ITS</title>
<!-- Stylesheets are served from asset_umum/ via CodeIgniter's base_url() -->
<link href="<?php echo base_url().'/asset_umum/css/bootstrap.min.css';?>" rel="stylesheet">
<link href="<?php echo base_url().'/asset_umum/css/prettyPhoto.css';?>" rel="stylesheet">
<link href="<?php echo base_url().'/asset_umum/css/font-awesome.min.css';?>" rel="stylesheet">
<link href="<?php echo base_url().'/asset_umum/css/animate.css';?>" rel="stylesheet">
<link href="<?php echo base_url().'/asset_umum/css/main.css';?>" rel="stylesheet">
<link href="<?php echo base_url().'/asset_umum/css/responsive.css';?>" rel="stylesheet">
<!--[if lt IE 9]> <script src="js/html5shiv.js"></script>
<script src="js/respond.min.js"></script> <![endif]-->
<!-- NOTE(review): favicon path below is relative, unlike the base_url()-based
     assets above — verify it resolves from every routed URL. -->
<link rel="shortcut icon" href="images/ico/favicon.png">
<link rel="apple-touch-icon-precomposed" sizes="144x144" href="<?php echo base_url().'/asset_umum/images/ico/apple-touch-icon-144-precomposed.png';?>">
<link rel="apple-touch-icon-precomposed" sizes="114x114" href="<?php echo base_url().'/asset_umum/images/ico/apple-touch-icon-114-precomposed.png';?>">
<link rel="apple-touch-icon-precomposed" sizes="72x72" href="<?php echo base_url().'/asset_umum/images/ico/apple-touch-icon-72-precomposed.png';?>">
<link rel="apple-touch-icon-precomposed" href="<?php echo base_url().'/asset_umum/images/ico/apple-touch-icon-57-precomposed.png';?>">
</head><!--/head-->
<body>
<!-- CSS-animated loading overlay, hidden by main.js once the page is ready -->
<div class="preloader">
<div class="preloder-wrap">
<div class="preloder-inner">
<div class="ball"></div>
<div class="ball"></div>
<div class="ball"></div>
<div class="ball"></div>
<div class="ball"></div>
<div class="ball"></div>
<div class="ball"></div>
</div>
</div>
</div><!--/.preloader-->
<!-- Fixed top navigation; in-page anchors plus CodeIgniter routes -->
<header id="navigation">
<div class="navbar navbar-inverse navbar-fixed-top" role="banner">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="#navigation"><h1><img src="<?php echo base_url().'/asset_umum/images/logo1.png';?>" alt="logo"></h1></a>
</div>
<div class="collapse navbar-collapse">
<ul class="nav navbar-nav navbar-right">
<li class="scroll active"><a href="#navigation">Home</a></li>
<li class="scroll"><a href="<?php echo base_url('index.php/c_user/lapor');?>">Laporan</a></li>
<li class="scroll"><a href="#history">History</a></li>
<li class="scroll"><a href="#contact">Contact</a></li>
<li class="scroll"><a href="<?php echo base_url('index.php/c_user/logout');?>">Logout</a></li>
</ul>
</div>
</div>
</div><!--/navbar-->
</header> <!--/#navigation-->
<!-- Hero: 3-slide Bootstrap carousel with animated captions -->
<section id="home">
<div class="home-pattern"></div>
<div id="main-carousel" class="carousel slide" data-ride="carousel">
<ol class="carousel-indicators">
<li data-target="#main-carousel" data-slide-to="0" class="active"></li>
<li data-target="#main-carousel" data-slide-to="1"></li>
<li data-target="#main-carousel" data-slide-to="2"></li>
</ol><!--/.carousel-indicators-->
<div class="carousel-inner">
<div class="item active" style="background-image: url(<?php echo base_url().'/asset_umum/images/slider/s1.jpg';?>)">
<div class="carousel-caption">
<div>
<h2 class="heading animated bounceInDown">Institut Teknologi Sepuluh Nopember</h2>
<p class="animated bounceInUp">Information Systems Department</p>
</div>
</div>
</div>
<div class="item" style="background-image: url(<?php echo base_url().'asset_umum/images/slider/s2.jpg';?>)">
<div class="carousel-caption"> <div>
<h2 class="heading animated bounceInDown"><span>Sistem Monitoring KP</span></h2>
<p class="animated bounceInUp">Sistem terintegrasi untuk mahasiswa Sistem Informasi</p>
</div>
</div>
</div>
<div class="item" style="background-image: url(<?php echo base_url().'asset_umum/images/slider/s3.jpg';?>)">
<div class="carousel-caption">
<div>
<h2 class="heading animated bounceInRight">Create and Submit!</h2>
<p class="animated bounceInLeft">Mudah dan praktis digunakan</p>
<a class="btn btn-default slider-btn animated bounceInUp" href="<?php echo base_url('index.php/c_user/logout');?>">Get Started</a>
</div>
</div>
</div>
</div><!--/.carousel-inner-->
<a class="carousel-left member-carousel-control hidden-xs" href="#main-carousel" data-slide="prev"><i class="fa fa-angle-left"></i></a>
<a class="carousel-right member-carousel-control hidden-xs" href="#main-carousel" data-slide="next"><i class="fa fa-angle-right"></i></a>
</div>
</section><!--/#home-->
<!-- History section: grid of report-submission cards, each with a
     "Read More" modal.
     NOTE(review): all card text is Lorem-ipsum placeholder and the
     images use relative "images/blog/*.jpg" paths rather than the
     base_url()/asset_umum scheme used elsewhere — both look like
     unfinished template content; confirm before shipping. -->
<section id="history">
<div class="container">
<div class="row text-center clearfix">
<div class="col-sm-8 col-sm-offset-2">
<h2 class="title-one">History</h2>
<p class="blog-heading">Historis pengumpulan laporan KP</p>
</div>
</div>
<div class="row">
<div class="col-sm-4">
<div class="single-blog">
<img src="images/blog/1.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<ul class="post-meta">
<li><i class="fa fa-pencil-square-o"></i><strong> Posted By:</strong> John</li>
<li><i class="fa fa-clock-o"></i><strong> Posted On:</strong> Apr 15 2014</li>
</ul>
<div class="blog-content">
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
<a href="" class="btn btn-primary" data-toggle="modal" data-target="#blog-detail">Read More</a>
</div>
<div class="modal fade" id="blog-detail" tabindex="-1" role="dialog" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<img src="images/blog/3.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p><p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
</div>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-blog">
<img src="images/blog/2.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<ul class="post-meta">
<li><i class="fa fa-pencil-square-o"></i><strong> Posted By:</strong> John</li>
<li><i class="fa fa-clock-o"></i><strong> Posted On:</strong> Apr 15 2014</li>
</ul>
<div class="blog-content">
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
<a href="" class="btn btn-primary" data-toggle="modal" data-target="#blog-two">Read More</a>
</div>
<div class="modal fade" id="blog-two" tabindex="-1" role="dialog" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<img src="images/blog/2.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p><p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
</div>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-blog">
<img src="images/blog/3.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<ul class="post-meta">
<li><i class="fa fa-pencil-square-o"></i><strong> Posted By:</strong> John</li>
<li><i class="fa fa-clock-o"></i><strong> Posted On:</strong> Apr 15 2014</li>
</ul>
<div class="blog-content">
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
<a href="" class="btn btn-primary" data-toggle="modal" data-target="#blog-three">Read More</a>
</div>
<div class="modal fade" id="blog-three" tabindex="-1" role="dialog" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<img src="images/blog/3.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p><p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
</div>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-blog">
<img src="images/blog/3.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<ul class="post-meta">
<li><i class="fa fa-pencil-square-o"></i><strong> Posted By:</strong> John</li>
<li><i class="fa fa-clock-o"></i><strong> Posted On:</strong> Apr 15 2014</li>
</ul>
<div class="blog-content">
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
<a href="" class="btn btn-primary" data-toggle="modal" data-target="#blog-four">Read More</a></div>
<div class="modal fade" id="blog-four" tabindex="-1" role="dialog" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<img src="images/blog/3.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p><p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
</div>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-blog">
<img src="images/blog/2.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<ul class="post-meta">
<li><i class="fa fa-pencil-square-o"></i><strong> Posted By:</strong> John</li>
<li><i class="fa fa-clock-o"></i><strong> Posted On:</strong> Apr 15 2014</li>
</ul>
<div class="blog-content">
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
<a href="" class="btn btn-primary" data-toggle="modal" data-target="#blog-six">Read More</a>
</div>
<div class="modal fade" id="blog-six" tabindex="-1" role="dialog" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<img src="images/blog/2.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p><p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
</div>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-blog">
<img src="images/blog/1.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<ul class="post-meta">
<li><i class="fa fa-pencil-square-o"></i><strong> Posted By:</strong> John</li>
<li><i class="fa fa-clock-o"></i><strong> Posted On:</strong> Apr 15 2014</li>
</ul>
<div class="blog-content">
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
<a href="" class="btn btn-primary" data-toggle="modal" data-target="#blog-seven">Read More</a>
</div>
<div class="modal fade" id="blog-seven" tabindex="-1" role="dialog" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-body">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<img src="images/blog/1.jpg" alt="" />
<h2>Lorem ipsum dolor sit amet</h2>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p><p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.</p>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</section> <!--/#blog-->
<!-- Contact section: department address, hours and social links -->
<section id="contact">
<div class="container">
<div class="row text-center clearfix">
<div class="col-sm-8 col-sm-offset-2">
<div class="contact-heading">
<h2 class="title-one">Contact With Us</h2>
<p></p>
</div>
</div>
</div>
</div>
<div class="container">
<div class="contact-details">
<div class="pattern"></div>
<div class="row text-center clearfix">
<div class="col-sm-6">
<div class="contact-address">
<address><p><span>Departemen </span>Sistem Informasi</p><strong>
Sekretariat <br>
Lt. 2 Gd. Lama FTIf ITS <br>
Institut Teknologi Sepuluh Nopember, Sukolilo <br>
Surabaya, 60111 <br>
Indonesia <br>
Phone: +62 31 5999944 <br> </strong>
</address>
</div>
</div>
<div class="col-sm-6">
<div id="contact-details">
<div class="status alert alert-success" style="display: none"></div>
<div class="contact-address"><address><p>Sisfor <span>KP</span></p><strong>Jam Buka : 08.00-16.00<br> +62 31 5999944</strong><br></address>
<div class="social-icons">
<a href="https://ms-my.facebook.com/pages/Jurusan-Sistem-Informasi-ITS/149235835122966" class="facebook external" data-animate-hover="shake"><i class="fa fa-facebook"></i></a>
<a href="https://www.instagram.com/hmsi_its/" class="instagram external" data-animate-hover="shake"><i class="fa fa-instagram"></i></a>
<!-- NOTE(review): raw "&" in the Gmail compose URL below should be
     "&amp;" for valid HTML — browsers tolerate it; confirm before changing. -->
<a href="https://mail.google.com/mail/?view=cm&fs=1&tf=1&to=jurusan@is.its.ac.id&su=Hello&shva=1" class="email external" data-animate-hover="shake"><i class="fa fa-envelope"></i></a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</section> <!--/#contact-->
<footer id="footer">
<div class="container">
<div class="text-center">
<p>Copyright © 2017 - <a href="http://is.its.ac.id/">Information System Department</a> | ITS </p>
</div>
</div>
</footer> <!--/#footer-->
<!-- Scripts loaded at the end of <body>; jQuery must come first -->
<script type="text/javascript" src="<?php echo base_url().'/asset_umum/js/jquery.js';?>"></script>
<script type="text/javascript" src="<?php echo base_url().'/asset_umum/js/bootstrap.min.js';?>"></script>
<script type="text/javascript" src="<?php echo base_url().'/asset_umum/js/smoothscroll.js';?>"></script>
<script type="text/javascript" src="<?php echo base_url().'/asset_umum/js/jquery.isotope.min.js';?>"></script>
<script type="text/javascript" src="<?php echo base_url().'/asset_umum/js/jquery.prettyPhoto.js';?>"></script>
<script type="text/javascript" src="<?php echo base_url().'/asset_umum/js/jquery.parallax.js';?>"></script>
<script type="text/javascript" src="<?php echo base_url().'/asset_umum/js/main.js';?>"></script>
</body>
</html>
| dinanandka/MonitoringKP_KPPL | application/views/welcome.php | PHP | mit | 21,424 |
// Test helper that swaps window.requestAnimationFrame for a manual queue
// so animation-frame callbacks can be flushed deterministically via step().

let original: (fn: FrameRequestCallback) => number;
// Typed precisely (was any[]): only FrameRequestCallbacks are queued.
let requesters: FrameRequestCallback[] = [];

// Queues the callback and returns a fake handle (its 1-based queue position).
function fakeRaf(fn: FrameRequestCallback): number {
  requesters.push(fn);
  return requesters.length;
}

// Installs the fake in place of the real requestAnimationFrame.
function use() {
  original = window.requestAnimationFrame;
  window.requestAnimationFrame = fakeRaf;
}

// Restores the real implementation.
// NOTE(review): restoration is deliberately delayed by 2s — presumably to
// let in-flight animations drain before handing back control; confirm.
function restore() {
  setTimeout(() => {
    window.requestAnimationFrame = original;
  }, 2000);
}

// Invokes every queued callback with a fixed 16ms timestamp. Callbacks
// queued while flushing land in the next step(), not this one.
function step() {
  let cur = requesters;
  requesters = [];
  cur.forEach(function(f) { return f(16); });
}

export default {
  use,
  restore,
  step,
};
| motorcyclejs/dom | test/helpers/fake-raf.ts | TypeScript | mit | 552 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace MotoBotCore.Interfaces
{
    /// <summary>
    /// Minimal contract for a chat channel.
    /// </summary>
    public interface IChannel
    {
        /// <summary>Channel name.</summary>
        string Name { get; set; }

        /// <summary>Channel message of the day.</summary>
        string Motd { get; set; }
    }
}
| luetm/MotoBot | MotoBotCore/Interfaces/IChannel.cs | C# | mit | 267 |
//-----------------------------------------------------------------------------
// Mpeg Layer 1, 2, 3 decoder
// Audio engine component ("Shkval" sound engine)
// team      : AntiTank
// developer : Marat Gilyazetdinov (Marych)
//-----------------------------------------------------------------------------
// включения
#include <string.h>
#include <math.h>
#include "MpegDecoder.h"
// Precomputes the window (w, w2) and cosine (coef) tables for the 18-point
// IMDCT and the (v, v2, coef87) tables for the 6-point IMDCT used in
// short-block decoding. Must run once before imdct18/imdct6_3.
void CDecompressMpeg::imdct_init()
{
	int k, p, n;
	double t, pi;
	n = 18;
	pi = 4.0 * atan(1.0); // pi computed portably
	t = pi / (4 * n);
	for (p = 0; p < n; p++)
		w[p] = (float) (2.0 * cos(t * (2 * p + 1)));
	for (p = 0; p < 9; p++)
		w2[p] = (float) (2.0 * cos(2 * t * (2 * p + 1)));
	t = pi / (2 * n);
	for (k = 0; k < 9; k++) {
		for (p = 0; p < 4; p++)
			coef[k][p] = (float) (cos(t * (2 * k) * (2 * p + 1)));
	}
	// 6-point (short block) tables.
	n = 6;
	pi = 4.0 * atan(1.0);
	t = pi / (4 * n);
	for (p = 0; p < n; p++)
		v[p] = (float) (2.0 * cos(t * (2 * p + 1)));
	for (p = 0; p < 3; p++)
		v2[p] = (float) (2.0 * cos(2 * t * (2 * p + 1)));
	t = pi / (2 * n);
	k = 1;
	p = 0;
	coef87 = (float) (cos(t * (2 * k) * (2 * p + 1)));
	// Fold a factor of 1/2 into v[] and 2 into coef87 (saves work later).
	for (p = 0; p < 6; p++)
		v[p] = v[p] / 2.0f;
	coef87 = (float) (2.0 * coef87);
}
// In-place 18-point IMDCT over f[0..17], using the w/w2/coef tables built
// by imdct_init(). The structure is a hand-unrolled butterfly: the first
// loop windows the input into even (a[]) and odd (b[]) partial sums, the
// remainder combines them with cosine coefficients and running differences.
void CDecompressMpeg::imdct18(float f[18]) /* 18 point */
{
	int p;
	float a[9], b[9];
	float ap, bp, a8p, b8p;
	float g1, g2;
	// Windowing stage: build a[0..8]/b[0..8] from mirrored input pairs.
	for (p = 0; p < 4; p++) {
		g1 = w[p] * f[p];
		g2 = w[17 - p] * f[17 - p];
		ap = g1 + g2; // a[p]
		bp = w2[p] * (g1 - g2); // b[p]
		g1 = w[8 - p] * f[8 - p];
		g2 = w[9 + p] * f[9 + p];
		a8p = g1 + g2; // a[8-p]
		b8p = w2[8 - p] * (g1 - g2); // b[8-p]
		a[p] = ap + a8p;
		a[5 + p] = ap - a8p;
		b[p] = bp + b8p;
		b[5 + p] = bp - b8p;
	}
	// Middle pair (p == 4 after the loop).
	g1 = w[p] * f[p];
	g2 = w[17 - p] * f[17 - p];
	a[p] = g1 + g2;
	b[p] = w2[p] * (g1 - g2);
	// Recombination stage: outputs are built progressively, with each f[k]
	// corrected by the previously computed f[k-1] (running differences).
	f[0] = 0.5f * (a[0] + a[1] + a[2] + a[3] + a[4]);
	f[1] = 0.5f * (b[0] + b[1] + b[2] + b[3] + b[4]);
	f[2] = coef[1][0] * a[5] +
		coef[1][1] * a[6] +
		coef[1][2] * a[7] +
		coef[1][3] * a[8];
	f[3] = coef[1][0] * b[5] +
		coef[1][1] * b[6] +
		coef[1][2] * b[7] +
		coef[1][3] * b[8] -
		f[1];
	f[1] = f[1] - f[0];
	f[2] = f[2] - f[1];
	f[4] = coef[2][0] * a[0] +
		coef[2][1] * a[1] +
		coef[2][2] * a[2] +
		coef[2][3] * a[3] -
		a[4];
	f[5] = coef[2][0] * b[0] +
		coef[2][1] * b[1] +
		coef[2][2] * b[2] +
		coef[2][3] * b[3] -
		b[4] -
		f[3];
	f[3] = f[3] - f[2];
	f[4] = f[4] - f[3];
	f[6] = coef[3][0] * (a[5] - a[7] - a[8]);
	f[7] = coef[3][0] * (b[5] - b[7] - b[8]) - f[5];
	f[5] = f[5] - f[4];
	f[6] = f[6] - f[5];
	f[8] = coef[4][0] * a[0] +
		coef[4][1] * a[1] +
		coef[4][2] * a[2] +
		coef[4][3] * a[3] +
		a[4];
	f[9] = coef[4][0] * b[0] +
		coef[4][1] * b[1] +
		coef[4][2] * b[2] +
		coef[4][3] * b[3] +
		b[4] -
		f[7];
	f[7] = f[7] - f[6];
	f[8] = f[8] - f[7];
	f[10] = coef[5][0] * a[5] +
		coef[5][1] * a[6] +
		coef[5][2] * a[7] +
		coef[5][3] * a[8];
	f[11] = coef[5][0] * b[5] +
		coef[5][1] * b[6] +
		coef[5][2] * b[7] +
		coef[5][3] * b[8] -
		f[9];
	f[9] = f[9] - f[8];
	f[10] = f[10] - f[9];
	f[12] = 0.5f * (a[0] + a[2] + a[3]) - a[1] - a[4];
	f[13] = 0.5f * (b[0] + b[2] + b[3]) - b[1] - b[4] - f[11];
	f[11] = f[11] - f[10];
	f[12] = f[12] - f[11];
	f[14] = coef[7][0] * a[5] +
		coef[7][1] * a[6] +
		coef[7][2] * a[7] +
		coef[7][3] * a[8];
	f[15] = coef[7][0] * b[5] +
		coef[7][1] * b[6] +
		coef[7][2] * b[7] +
		coef[7][3] * b[8] -
		f[13];
	f[13] = f[13] - f[12];
	f[14] = f[14] - f[13];
	f[16] = coef[8][0] * a[0] +
		coef[8][1] * a[1] +
		coef[8][2] * a[2] +
		coef[8][3] * a[3] +
		a[4];
	f[17] = coef[8][0] * b[0] +
		coef[8][1] * b[1] +
		coef[8][2] * b[2] +
		coef[8][3] * b[3] +
		b[4] -
		f[15];
	f[15] = f[15] - f[14];
	f[16] = f[16] - f[15];
	f[17] = f[17] - f[16];
}
/*--------------------------------------------------------------------*/
/* Three interleaved 6-point IMDCTs (short blocks). Input is strided:
   window i's samples sit at f[w + 3*i]; output is reordered to
   c[window][i], i.e. three contiguous 6-sample groups. */
void CDecompressMpeg::imdct6_3(float f[]) /* 6 point */
{
	int w;
	float buf[18];
	float* a,* c; // b[i] = a[3+i]
	float g1, g2;
	float a02, b02;
	c = f;
	a = buf;
	// Stage 1: per window, windowed even/odd partial sums into buf.
	for (w = 0; w < 3; w++) {
		g1 = v[0] * f[3 * 0];
		g2 = v[5] * f[3 * 5];
		a[0] = g1 + g2;
		a[3 + 0] = v2[0] * (g1 - g2);
		g1 = v[1] * f[3 * 1];
		g2 = v[4] * f[3 * 4];
		a[1] = g1 + g2;
		a[3 + 1] = v2[1] * (g1 - g2);
		g1 = v[2] * f[3 * 2];
		g2 = v[3] * f[3 * 3];
		a[2] = g1 + g2;
		a[3 + 2] = v2[2] * (g1 - g2);
		a += 6;
		f++;
	}
	a = buf;
	// Stage 2: recombine each window's partials into 6 outputs, using the
	// same running-difference pattern as imdct18 (coef87 from imdct_init).
	for (w = 0; w < 3; w++) {
		a02 = (a[0] + a[2]);
		b02 = (a[3 + 0] + a[3 + 2]);
		c[0] = a02 + a[1];
		c[1] = b02 + a[3 + 1];
		c[2] = coef87 * (a[0] - a[2]);
		c[3] = coef87 * (a[3 + 0] - a[3 + 2]) - c[1];
		c[1] = c[1] - c[0];
		c[2] = c[2] - c[1];
		c[4] = a02 - a[1] - a[1];
		c[5] = b02 - a[3 + 1] - a[3 + 1] - c[3];
		c[3] = c[3] - c[2];
		c[4] = c[4] - c[3];
		c[5] = c[5] - c[4];
		a += 6;
		c += 6;
	}
}
// Builds coef32[]: 31 half-secant coefficients (16+8+4+2+1) used by the
// recursive butterflies of the 32/16-point subband DCTs below.
void CDecompressMpeg::fdct_init() /* gen coef for N=32 (31 coefs) */
{
	int p, n, i, k;
	double t, pi;
	pi = 4.0 * atan(1.0);
	n = 16;
	k = 0;
	// Five levels, halving n each time: 16, 8, 4, 2, 1 coefficients.
	for (i = 0; i < 5; i++, n = n / 2) {
		for (p = 0; p < n; p++, k++) {
			t = (pi / (4 * n)) * (2 * p + 1);
			coef32[k] = (float) (0.50 / cos(t));
		}
	}
}
// Forward butterfly stage: for each of m groups of n samples, stores the
// mirrored sums in the lower half of f and coefficient-scaled differences
// in the upper half. x: input, f: output, coef: per-pair scale factors.
void CDecompressMpeg::forward_bf(int m, int n, float x[], float f[],
								 float coef[])
{
	int i, j, n2;
	int p, q, p0, k;
	p0 = 0;
	n2 = n >> 1;
	for (i = 0; i < m; i++, p0 += n) {
		k = 0;
		p = p0;
		q = p + n - 1; // q walks backwards from the group's end
		for (j = 0; j < n2; j++, p++, q--, k++) {
			f[p] = x[p] + x[q];
			f[n2 + p] = coef[k] * (x[p] - x[q]);
		}
	}
}
// Inverse (reordering) butterfly stage: interleaves each group's lower
// half into even output slots and pairwise sums of the upper half into
// odd slots, undoing the split produced by forward_bf.
void CDecompressMpeg::back_bf(int m, int n, float x[], float f[])
{
	int i, j, n2, n21;
	int p, q, p0;
	p0 = 0;
	n2 = n >> 1;
	n21 = n2 - 1;
	for (i = 0; i < m; i++, p0 += n) {
		p = p0;
		q = p0;
		for (j = 0; j < n2; j++, p += 2, q++)
			f[p] = x[q];
		p = p0 + 1;
		for (j = 0; j < n21; j++, p += 2, q++)
			f[p] = x[q] + x[q + 1];
		f[p] = x[q]; // last odd slot takes the final element unpaired
	}
}
// 32-point subband DCT: special mirrored first stage, then four forward
// butterfly levels followed by four reordering passes, ping-ponging
// between a[] and b[]. x: 32 input samples, c: 32 output coefficients.
void CDecompressMpeg::fdct32(float x[], float c[])
{
	float a[32]; /* ping pong buffers */
	float b[32];
	int p, q;
	// Apply equalizer gains when enabled (currently disabled).
	/* if (m_enableEQ) {
	for (p = 0; p < 32; p++)
	x[p] *= m_equalizer[p];
	}*/
	/* special first stage */
	for (p = 0, q = 31; p < 16; p++, q--) {
		a[p] = x[p] + x[q];
		a[16 + p] = coef32[p] * (x[p] - x[q]);
	}
	forward_bf(2, 16, a, b, coef32 + 16);
	forward_bf(4, 8, b, a, coef32 + 16 + 8);
	forward_bf(8, 4, a, b, coef32 + 16 + 8 + 4);
	forward_bf(16, 2, b, a, coef32 + 16 + 8 + 4 + 2);
	back_bf(8, 4, a, b);
	back_bf(4, 8, b, a);
	back_bf(2, 16, a, b);
	back_bf(1, 32, b, c);
}
// 32-point subband DCT for one channel of interleaved stereo input:
// identical to fdct32 except the first stage reads every other sample
// (x[0], x[2], ...) so L/R channels can be transformed independently.
void CDecompressMpeg::fdct32_dual(float x[], float c[])
{
	float a[32]; /* ping pong buffers */
	float b[32];
	int p, pp, qq;
	// Equalizer hook, disabled (see fdct32).
	/* if (m_enableEQ) {
	for (p = 0; p < 32; p++)
	x[p] *= m_equalizer[p];
	}*/
	/* special first stage for dual chan (interleaved x) */
	pp = 0;
	qq = 2 * 31;
	for (p = 0; p < 16; p++, pp += 2, qq -= 2) {
		a[p] = x[pp] + x[qq];
		a[16 + p] = coef32[p] * (x[pp] - x[qq]);
	}
	forward_bf(2, 16, a, b, coef32 + 16);
	forward_bf(4, 8, b, a, coef32 + 16 + 8);
	forward_bf(8, 4, a, b, coef32 + 16 + 8 + 4);
	forward_bf(16, 2, b, a, coef32 + 16 + 8 + 4 + 2);
	back_bf(8, 4, a, b);
	back_bf(4, 8, b, a);
	back_bf(2, 16, a, b);
	back_bf(1, 32, b, c);
}
/* 32-point fast DCT over interleaved stereo input mixed down to mono:
 * each tap averages the left/right pair (0.5 * (L + R)) before the
 * usual first-stage butterfly. */
void CDecompressMpeg::fdct32_dual_mono(float x[], float c[])
{
	float a[32]; /* ping pong buffers */
	float b[32];
	float t1, t2;
	int p, pp, qq;
	/* special first stage */
	pp = 0;
	qq = 2 * 31;
	for (p = 0; p < 16; p++, pp += 2, qq -= 2) {
		t1 = 0.5F * (x[pp] + x[pp + 1]);	/* average the interleaved pair */
		t2 = 0.5F * (x[qq] + x[qq + 1]);
		a[p] = t1 + t2;
		a[16 + p] = coef32[p] * (t1 - t2);
	}
	forward_bf(2, 16, a, b, coef32 + 16);
	forward_bf(4, 8, b, a, coef32 + 16 + 8);
	forward_bf(8, 4, a, b, coef32 + 16 + 8 + 4);
	forward_bf(16, 2, b, a, coef32 + 16 + 8 + 4 + 2);
	back_bf(8, 4, a, b);
	back_bf(4, 8, b, a);
	back_bf(2, 16, a, b);
	back_bf(1, 32, b, c);
}
/* 16-point fast DCT (half-rate output); the highest subband is dropped,
 * so x[0] has no mirror partner in the first stage. */
void CDecompressMpeg::fdct16(float x[], float c[])
{
	float a[16]; /* ping pong buffers */
	float b[16];
	int p, q;
	/* special first stage (drop highest sb) */
	a[0] = x[0];
	a[8] = coef32[16] * x[0];
	for (p = 1, q = 14; p < 8; p++, q--) {
		a[p] = x[p] + x[q];
		a[8 + p] = coef32[16 + p] * (x[p] - x[q]);
	}
	forward_bf(2, 8, a, b, coef32 + 16 + 8);
	forward_bf(4, 4, b, a, coef32 + 16 + 8 + 4);
	forward_bf(8, 2, a, b, coef32 + 16 + 8 + 4 + 2);
	back_bf(4, 4, b, a);
	back_bf(2, 8, a, b);
	back_bf(1, 16, b, c);
}
/* 16-point fast DCT over one channel of interleaved stereo input
 * (x stepped by 2); highest subband dropped as in fdct16. */
void CDecompressMpeg::fdct16_dual(float x[], float c[])
{
	float a[16]; /* ping pong buffers */
	float b[16];
	int p, pp, qq;
	/* special first stage for interleaved input */
	a[0] = x[0];
	a[8] = coef32[16] * x[0];
	pp = 2;
	qq = 2 * 14;
	for (p = 1; p < 8; p++, pp += 2, qq -= 2) {
		a[p] = x[pp] + x[qq];
		a[8 + p] = coef32[16 + p] * (x[pp] - x[qq]);
	}
	forward_bf(2, 8, a, b, coef32 + 16 + 8);
	forward_bf(4, 4, b, a, coef32 + 16 + 8 + 4);
	forward_bf(8, 2, a, b, coef32 + 16 + 8 + 4 + 2);
	back_bf(4, 4, b, a);
	back_bf(2, 8, a, b);
	back_bf(1, 16, b, c);
}
/* 16-point fast DCT over interleaved stereo mixed to mono (each tap
 * averages the L/R pair); highest subband dropped as in fdct16. */
void CDecompressMpeg::fdct16_dual_mono(float x[], float c[])
{
	float a[16]; /* ping pong buffers */
	float b[16];
	float t1, t2;
	int p, pp, qq;
	/* special first stage */
	a[0] = 0.5F * (x[0] + x[1]);	/* mono mix of the first pair */
	a[8] = coef32[16] * a[0];
	pp = 2;
	qq = 2 * 14;
	for (p = 1; p < 8; p++, pp += 2, qq -= 2) {
		t1 = 0.5F * (x[pp] + x[pp + 1]);
		t2 = 0.5F * (x[qq] + x[qq + 1]);
		a[p] = t1 + t2;
		a[8 + p] = coef32[16 + p] * (t1 - t2);
	}
	forward_bf(2, 8, a, b, coef32 + 16 + 8);
	forward_bf(4, 4, b, a, coef32 + 16 + 8 + 4);
	forward_bf(8, 2, a, b, coef32 + 16 + 8 + 4 + 2);
	back_bf(4, 4, b, a);
	back_bf(2, 8, a, b);
	back_bf(1, 16, b, c);
}
/* 8-point fast DCT (quarter-rate output). */
void CDecompressMpeg::fdct8(float x[], float c[])
{
	float a[8]; /* ping pong buffers */
	float b[8];
	int p, q;
	/* special first stage */
	b[0] = x[0] + x[7];
	b[4] = coef32[16 + 8] * (x[0] - x[7]);
	for (p = 1, q = 6; p < 4; p++, q--) {
		b[p] = x[p] + x[q];
		b[4 + p] = coef32[16 + 8 + p] * (x[p] - x[q]);
	}
	forward_bf(2, 4, b, a, coef32 + 16 + 8 + 4);
	forward_bf(4, 2, a, b, coef32 + 16 + 8 + 4 + 2);
	back_bf(2, 4, b, a);
	back_bf(1, 8, a, c);
}
/* 8-point fast DCT over one channel of interleaved stereo input
 * (x stepped by 2; pass x or x+1 to select the channel). */
void CDecompressMpeg::fdct8_dual(float x[], float c[])
{
	float a[8]; /* ping pong buffers */
	float b[8];
	int p, pp, qq;
	/* special first stage for interleaved input */
	b[0] = x[0] + x[14];
	b[4] = coef32[16 + 8] * (x[0] - x[14]);
	pp = 2;
	qq = 2 * 6;
	for (p = 1; p < 4; p++, pp += 2, qq -= 2) {
		b[p] = x[pp] + x[qq];
		b[4 + p] = coef32[16 + 8 + p] * (x[pp] - x[qq]);
	}
	forward_bf(2, 4, b, a, coef32 + 16 + 8 + 4);
	forward_bf(4, 2, a, b, coef32 + 16 + 8 + 4 + 2);
	back_bf(2, 4, b, a);
	back_bf(1, 8, a, c);
}
/* 8-point fast DCT over interleaved stereo mixed to mono (each tap
 * averages the L/R pair). */
void CDecompressMpeg::fdct8_dual_mono(float x[], float c[])
{
	float a[8]; /* ping pong buffers */
	float b[8];
	float t1, t2;
	int p, pp, qq;
	/* special first stage */
	t1 = 0.5F * (x[0] + x[1]);
	t2 = 0.5F * (x[14] + x[15]);
	b[0] = t1 + t2;
	b[4] = coef32[16 + 8] * (t1 - t2);
	pp = 2;
	qq = 2 * 6;
	for (p = 1; p < 4; p++, pp += 2, qq -= 2) {
		t1 = 0.5F * (x[pp] + x[pp + 1]);
		t2 = 0.5F * (x[qq] + x[qq + 1]);
		b[p] = t1 + t2;
		b[4 + p] = coef32[16 + 8 + p] * (t1 - t2);
	}
	forward_bf(2, 4, b, a, coef32 + 16 + 8 + 4);
	forward_bf(4, 2, a, b, coef32 + 16 + 8 + 4 + 2);
	back_bf(2, 4, b, a);
	back_bf(1, 8, a, c);
}
/* Reset the bit reader to the start of buf: both the read cursor and
 * the origin pointer (used by bitget_bits_used) point at buf, and the
 * bit accumulator is emptied. */
void CDecompressMpeg::bitget_init(unsigned char* buf)
{
	bs_ptr = buf;
	bs_ptr0 = buf;
	bitbuf = 0;
	bits = 0;
}
/* Read and remove n bits (MSB-first) from the bitstream.
 * Refills the accumulator a byte at a time until it holds > 24 bits,
 * so callers are expected to keep n small (code lengths) — TODO confirm
 * the maximum n used by callers. */
int CDecompressMpeg::bitget(int n)
{
	unsigned int x;
	if (bits < n) {
		/* refill bit buf if necessary */
		while (bits <= 24) {
			bitbuf = (bitbuf << 8) | *bs_ptr++;
			bits += 8;
		}
	}
	bits -= n;
	x = bitbuf >> bits;	/* top n bits of the accumulator */
	bitbuf -= x << bits;	/* strip them off */
	return x;
}
/* Skip n bits.  If the accumulator runs dry, whole bytes are skipped
 * directly in the buffer and a single byte is reloaded; the leftover
 * sub-byte remainder (< 8 bits) is then discarded from the refill. */
void CDecompressMpeg::bitget_skip(int n)
{
	unsigned int k;
	if (bits < n) {
		n -= bits;	/* bits still buffered count toward the skip */
		k = n >> 3;
		/*--- bytes = n/8 --*/
		bs_ptr += k;
		n -= k << 3;	/* remainder is now 0..7 bits */
		bitbuf = *bs_ptr++;
		bits = 8;
	}
	bits -= n;
	bitbuf -= (bitbuf >> bits) << bits;	/* clear the consumed top bits */
}
/* Record the end of the input buffer so bitget_overrun can detect
 * reads past it. */
void CDecompressMpeg::bitget_init_end(unsigned char* buf_end)
{
	bs_ptr_end = buf_end;
}
/* Return nonzero (1) when the read cursor has moved past the end of
 * the buffer registered via bitget_init_end. */
int CDecompressMpeg::bitget_overrun()
{
	return (bs_ptr > bs_ptr_end) ? 1 : 0;
}
/* Number of bits consumed since bitget_init: bytes advanced times 8,
 * minus the bits still sitting unread in the accumulator. */
int CDecompressMpeg::bitget_bits_used()
{
	unsigned int used = (unsigned int) ((bs_ptr - bs_ptr0) * 8) - bits;
	return used;
}
/* Ensure at least n bits are buffered, refilling the accumulator a
 * byte at a time to > 24 bits if needed (same refill as bitget). */
void CDecompressMpeg::bitget_check(int n)
{
	if (bits < n) {
		while (bits <= 24) {
			bitbuf = (bitbuf << 8) | *bs_ptr++;
			bits += 8;
		}
	}
}
/* only huffman */
/*----- get n bits - checks for n+2 avail bits (linbits+sign) -----*/
/* Read and remove n bits, but refill when fewer than n+2 bits are
 * buffered — the extra 2 bits keep the linbits + sign of a Huffman
 * escape available without another refill (see comment above). */
int CDecompressMpeg::bitget_lb(int n)
{
	unsigned int x;
	if (bits < (n + 2)) {
		/* refill bit buf if necessary */
		while (bits <= 24) {
			bitbuf = (bitbuf << 8) | *bs_ptr++;
			bits += 8;
		}
	}
	bits -= n;
	x = bitbuf >> bits;
	bitbuf -= x << bits;
	return x;
}
/*------------- get n bits but DO NOT remove from bitstream --*/
/* Peek n bits without removing them from the stream.
 * NOTE(review): the refill guard tests MAXBITS + 2 rather than n —
 * presumably MAXBITS bounds every peek so one check covers all sizes;
 * confirm against the callers. */
int CDecompressMpeg::bitget2(int n)
{
	unsigned int x;
	if (bits < (MAXBITS + 2)) {
		/* refill bit buf if necessary */
		while (bits <= 24) {
			bitbuf = (bitbuf << 8) | *bs_ptr++;
			bits += 8;
		}
	}
	x = bitbuf >> (bits - n);	/* top n bits, accumulator unchanged */
	return x;
}
/*------------- remove n bits from bitstream ---------*/
/* Discard n previously-peeked bits from the accumulator (no refill). */
void CDecompressMpeg::bitget_purge(int n)
{
	bits -= n;
	unsigned int top = bitbuf >> bits;
	bitbuf -= top << bits;
}
/* Macro-style variant of bitget_check: ensure at least n bits are
 * buffered, refilling a byte at a time to > 24 bits. */
void CDecompressMpeg::mac_bitget_check(int n)
{
	if (bits < n) {
		while (bits <= 24) {
			bitbuf = (bitbuf << 8) | *bs_ptr++;
			bits += 8;
		}
	}
}
/* Extract n buffered bits without any refill — the caller must have
 * guaranteed availability via mac_bitget_check. */
int CDecompressMpeg::mac_bitget(int n)
{
	bits -= n;
	unsigned int code = bitbuf >> bits;
	bitbuf -= code << bits;
	return code;
}
/* Peek n buffered bits without removing them (no refill, no check). */
int CDecompressMpeg::mac_bitget2(int n)
{
	unsigned int shift = bits - n;
	return (int) (bitbuf >> shift);
}
/* Extract a single buffered bit (no refill, no availability check). */
int CDecompressMpeg::mac_bitget_1bit()
{
	--bits;
	unsigned int bit = bitbuf >> bits;
	bitbuf -= bit << bits;
	return bit;
}
/* Discard n buffered bits (no refill); mirrors bitget_purge. */
void CDecompressMpeg::mac_bitget_purge(int n)
{
	bits -= n;
	unsigned int top = bitbuf >> bits;
	bitbuf -= top << bits;
}
/* Synthesis window: one 32-sample granule -> 32 bytes of 8-bit PCM.
 * vbuf is a 512-entry circular buffer (indices masked with & 511);
 * vb_ptr is the current granule position.  Each output sample is a
 * 16-tap windowed sum, clipped to the 16-bit range; the high byte is
 * taken and biased to unsigned with ^ 0x80. */
void CDecompressMpeg::windowB(float* vbuf, int vb_ptr, unsigned char* pcm)
{
	int i, j;
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 16;
	bx = (si + 32) & 511;
	coef = wincoef;
	/*-- first 16 --*/
	for (i = 0; i < 16; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {	/* 8 forward taps + 8 mirrored taps */
			sum += (*coef++) * vbuf[si];
			si = (si + 64) & 511;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		si++;
		bx--;
		tmp = (long) sum;
		if (tmp > 32767)	/* clip to 16-bit signed range */
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;	/* high byte, unsigned bias */
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 64) & 511;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	/*-- last 15 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 15; i++) {
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 64) & 511;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	}
}
/* Same windowing as windowB, but writes every other output byte
 * (pcm += 2) so two calls can interleave left/right 8-bit channels. */
void CDecompressMpeg::windowB_dual(float* vbuf, int vb_ptr, unsigned char* pcm)
{
	int i, j; /* dual window interleaves output */
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 16;
	bx = (si + 32) & 511;
	coef = wincoef;
	/*-- first 16 --*/
	for (i = 0; i < 16; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si = (si + 64) & 511;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		si++;
		bx--;
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
		pcm += 2;	/* skip the other channel's slot */
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 64) & 511;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
	pcm += 2;
	/*-- last 15 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 15; i++) {
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 64) & 511;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
		pcm += 2;
	}
}
/* Half-rate synthesis window: 16 bytes of 8-bit PCM per granule over a
 * 256-entry circular buffer.  si/bx are deliberately unsigned char so
 * index arithmetic wraps mod 256 automatically.  The coef pointer skips
 * 16 entries per output to subsample the 264-entry window table. */
void CDecompressMpeg::windowB16(float* vbuf, int vb_ptr, unsigned char* pcm)
{
	int i, j;
	unsigned char si, bx;	/* 8-bit wraparound == mod-256 indexing */
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 8;
	bx = si + 16;
	coef = wincoef;
	/*-- first 8 --*/
	for (i = 0; i < 8; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si += 32;
			sum -= (*coef++) * vbuf[bx];
			bx += 32;
		}
		si++;
		bx--;
		coef += 16;	/* subsample the window table */
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx += 32;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	/*-- last 7 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 7; i++) {
		coef -= 16;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si += 32;
			sum += (*coef--) * vbuf[bx];
			bx += 32;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	}
}
/* Half-rate variant of windowB_dual: 16 interleaved 8-bit samples per
 * granule (pcm += 2), mod-256 indexing via unsigned char si/bx. */
void CDecompressMpeg::windowB16_dual(float* vbuf, int vb_ptr,
	unsigned char* pcm)
{
	int i, j;
	unsigned char si, bx;	/* 8-bit wraparound == mod-256 indexing */
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 8;
	bx = si + 16;
	coef = wincoef;
	/*-- first 8 --*/
	for (i = 0; i < 8; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si += 32;
			sum -= (*coef++) * vbuf[bx];
			bx += 32;
		}
		si++;
		bx--;
		coef += 16;
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
		pcm += 2;	/* interleave with the other channel */
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx += 32;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
	pcm += 2;
	/*-- last 7 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 7; i++) {
		coef -= 16;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si += 32;
			sum += (*coef--) * vbuf[bx];
			bx += 32;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
		pcm += 2;
	}
}
/* Quarter-rate synthesis window: 8 bytes of 8-bit PCM per granule over
 * a 128-entry circular buffer (& 127); coef skips 48 entries per
 * output to subsample the window table. */
void CDecompressMpeg::windowB8(float* vbuf, int vb_ptr, unsigned char* pcm)
{
	int i, j;
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 4;
	bx = (si + 8) & 127;
	coef = wincoef;
	/*-- first 4 --*/
	for (i = 0; i < 4; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si = (si + 16) & 127;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		si++;
		bx--;
		coef += 48;	/* subsample the window table */
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 16) & 127;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	/*-- last 3 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 3; i++) {
		coef -= 48;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 16) & 127;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = ((unsigned char) (tmp >> 8)) ^ 0x80;
	}
}
/*--------------- 8 pt dual window (interleaved output) -----------------*/
/* Quarter-rate variant of windowB_dual: 8 interleaved 8-bit samples
 * per granule (pcm += 2) over a 128-entry circular buffer. */
void CDecompressMpeg::windowB8_dual(float* vbuf, int vb_ptr,
	unsigned char* pcm)
{
	int i, j;
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 4;
	bx = (si + 8) & 127;
	coef = wincoef;
	/*-- first 4 --*/
	for (i = 0; i < 4; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si = (si + 16) & 127;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		si++;
		bx--;
		coef += 48;
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
		pcm += 2;	/* interleave with the other channel */
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 16) & 127;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
	pcm += 2;
	/*-- last 3 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 3; i++) {
		coef -= 48;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 16) & 127;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = ((unsigned char) (tmp >> 8)) ^ 0x80;
		pcm += 2;
	}
}
/* Subband transform, 8-bit mono: n granules of 32 samples each
 * (input stride 64, output stride 32 bytes). */
void CDecompressMpeg::sbtB_mono(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct32(sample, vbuf + vb_ptr);
		windowB(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
/* Subband transform, 8-bit stereo: both channels transformed per
 * granule and interleaved into the output. */
void CDecompressMpeg::sbtB_dual(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct32_dual(sample, vbuf + vb_ptr);
		fdct32_dual(sample + 1, vbuf2 + vb_ptr);
		windowB_dual(vbuf, vb_ptr, out);
		windowB_dual(vbuf2, vb_ptr, out + 1);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 64;
	}
}
/* Subband transform, 8-bit: interleaved stereo input mixed down to a
 * single mono output channel. */
void CDecompressMpeg::sbtB_dual_mono(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct32_dual_mono(sample, vbuf + vb_ptr);
		windowB(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
/* Subband transform, 8-bit: keep only the left channel of interleaved
 * stereo input. */
void CDecompressMpeg::sbtB_dual_left(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct32_dual(sample, vbuf + vb_ptr);
		windowB(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
/* Subband transform, 8-bit: keep only the right channel of interleaved
 * stereo input (offset the sample pointer by one before transforming). */
void CDecompressMpeg::sbtB_dual_right(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	sample++; /* point to right chan */
	while (n-- > 0) {
		fdct32_dual(sample, vbuf + vb_ptr);
		windowB(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
/* Half-rate subband transform, 8-bit mono: 16 output bytes per granule. */
void CDecompressMpeg::sbtB16_mono(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct16(sample, vbuf + vb_ptr);
		windowB16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
/* Half-rate subband transform, 8-bit stereo: both channels interleaved. */
void CDecompressMpeg::sbtB16_dual(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct16_dual(sample, vbuf + vb_ptr);
		fdct16_dual(sample + 1, vbuf2 + vb_ptr);
		windowB16_dual(vbuf, vb_ptr, out);
		windowB16_dual(vbuf2, vb_ptr, out + 1);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 32;
	}
}
/* Half-rate subband transform, 8-bit: stereo input mixed down to mono. */
void CDecompressMpeg::sbtB16_dual_mono(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct16_dual_mono(sample, vbuf + vb_ptr);
		windowB16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
/* Half-rate subband transform, 8-bit: left channel only. */
void CDecompressMpeg::sbtB16_dual_left(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct16_dual(sample, vbuf + vb_ptr);
		windowB16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
/* Half-rate subband transform, 8-bit: right channel only. */
void CDecompressMpeg::sbtB16_dual_right(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	sample++;	/* right channel of the interleaved input */
	while (n-- > 0) {
		fdct16_dual(sample, vbuf + vb_ptr);
		windowB16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
/* Quarter-rate subband transform, 8-bit mono: 8 output bytes per granule. */
void CDecompressMpeg::sbtB8_mono(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct8(sample, vbuf + vb_ptr);
		windowB8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
/* Quarter-rate subband transform, 8-bit stereo: channels interleaved. */
void CDecompressMpeg::sbtB8_dual(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct8_dual(sample, vbuf + vb_ptr);
		fdct8_dual(sample + 1, vbuf2 + vb_ptr);
		windowB8_dual(vbuf, vb_ptr, out);
		windowB8_dual(vbuf2, vb_ptr, out + 1);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 16;
	}
}
/* Quarter-rate subband transform, 8-bit: stereo mixed down to mono. */
void CDecompressMpeg::sbtB8_dual_mono(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct8_dual_mono(sample, vbuf + vb_ptr);
		windowB8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
/* Quarter-rate subband transform, 8-bit: left channel only. */
void CDecompressMpeg::sbtB8_dual_left(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	while (n-- > 0) {
		fdct8_dual(sample, vbuf + vb_ptr);
		windowB8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
/* Quarter-rate subband transform, 8-bit: right channel only. */
void CDecompressMpeg::sbtB8_dual_right(float* sample, void* in_pcm, int n)
{
	unsigned char* out = (unsigned char*) in_pcm;
	sample++;	/* right channel of the interleaved input */
	while (n-- > 0) {
		fdct8_dual(sample, vbuf + vb_ptr);
		windowB8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
/* Layer III subband transform, 8-bit mono: fixed 18 granules of 32
 * samples (input stride 32).  The channel argument is unused on the
 * mono path; the original code contained a dead store (ch = 0). */
void CDecompressMpeg::sbtB_mono_L3(float* sample, void* in_pcm, int ch)
{
	int i;
	unsigned char * pcm = (unsigned char *) in_pcm;
	(void) ch;	/* mono path ignores the channel selector */
	for (i = 0; i < 18; i++) {
		fdct32(sample, vbuf + vb_ptr);
		windowB(vbuf, vb_ptr, pcm);
		sample += 32;
		vb_ptr = (vb_ptr - 32) & 511;
		pcm += 32;
	}
}
/* Layer III subband transform, 8-bit stereo: 18 granules per call.
 * ch selects the channel — each channel keeps its own window buffer
 * (vbuf/vb_ptr vs vbuf2/vb2_ptr) and interleaved output slot. */
void CDecompressMpeg::sbtB_dual_L3(float* sample, void* in_pcm, int ch)
{
	unsigned char* out = (unsigned char*) in_pcm;
	if (ch == 0) {
		for (int i = 0; i < 18; i++) {
			fdct32(sample, vbuf + vb_ptr);
			windowB_dual(vbuf, vb_ptr, out);
			sample += 32;
			vb_ptr = (vb_ptr - 32) & 511;
			out += 64;
		}
	} else {
		for (int i = 0; i < 18; i++) {
			fdct32(sample, vbuf2 + vb2_ptr);
			windowB_dual(vbuf2, vb2_ptr, out + 1);
			sample += 32;
			vb2_ptr = (vb2_ptr - 32) & 511;
			out += 64;
		}
	}
}
/* Layer III half-rate subband transform, 8-bit mono: 18 granules.
 * The channel argument is unused on the mono path; the original code
 * contained a dead store (ch = 0). */
void CDecompressMpeg::sbtB16_mono_L3(float* sample, void* in_pcm, int ch)
{
	int i;
	unsigned char * pcm = (unsigned char *) in_pcm;
	(void) ch;	/* mono path ignores the channel selector */
	for (i = 0; i < 18; i++) {
		fdct16(sample, vbuf + vb_ptr);
		windowB16(vbuf, vb_ptr, pcm);
		sample += 32;
		vb_ptr = (vb_ptr - 16) & 255;
		pcm += 16;
	}
}
/* Layer III half-rate subband transform, 8-bit stereo: 18 granules,
 * per-channel window state, interleaved output. */
void CDecompressMpeg::sbtB16_dual_L3(float* sample, void* in_pcm, int ch)
{
	unsigned char* out = (unsigned char*) in_pcm;
	if (ch == 0) {
		for (int i = 0; i < 18; i++) {
			fdct16(sample, vbuf + vb_ptr);
			windowB16_dual(vbuf, vb_ptr, out);
			sample += 32;
			vb_ptr = (vb_ptr - 16) & 255;
			out += 32;
		}
	} else {
		for (int i = 0; i < 18; i++) {
			fdct16(sample, vbuf2 + vb2_ptr);
			windowB16_dual(vbuf2, vb2_ptr, out + 1);
			sample += 32;
			vb2_ptr = (vb2_ptr - 16) & 255;
			out += 32;
		}
	}
}
/* Layer III quarter-rate subband transform, 8-bit mono: 18 granules.
 * The channel argument is unused on the mono path; the original code
 * contained a dead store (ch = 0). */
void CDecompressMpeg::sbtB8_mono_L3(float* sample, void* in_pcm, int ch)
{
	int i;
	unsigned char * pcm = (unsigned char *) in_pcm;
	(void) ch;	/* mono path ignores the channel selector */
	for (i = 0; i < 18; i++) {
		fdct8(sample, vbuf + vb_ptr);
		windowB8(vbuf, vb_ptr, pcm);
		sample += 32;
		vb_ptr = (vb_ptr - 8) & 127;
		pcm += 8;
	}
}
/* Layer III quarter-rate subband transform, 8-bit stereo: 18 granules,
 * per-channel window state, interleaved output. */
void CDecompressMpeg::sbtB8_dual_L3(float* sample, void* in_pcm, int ch)
{
	unsigned char* out = (unsigned char*) in_pcm;
	if (ch == 0) {
		for (int i = 0; i < 18; i++) {
			fdct8(sample, vbuf + vb_ptr);
			windowB8_dual(vbuf, vb_ptr, out);
			sample += 32;
			vb_ptr = (vb_ptr - 8) & 127;
			out += 16;
		}
	} else {
		for (int i = 0; i < 18; i++) {
			fdct8(sample, vbuf2 + vb2_ptr);
			windowB8_dual(vbuf2, vb2_ptr, out + 1);
			sample += 32;
			vb2_ptr = (vb2_ptr - 8) & 127;
			out += 16;
		}
	}
}
// window coefs
/* 264-entry synthesis window coefficient table.  The window*/windowB*
 * routines read it forward for the first half of each granule, take
 * the 8 entries past index 255 for the middle "special case" tap, and
 * walk it backward from wincoef + 255 for the mirrored second half. */
float CDecompressMpeg::wincoef[264] = {
	0.000000000f, 0.000442505f, -0.003250122f, 0.007003784f, -0.031082151f,
	0.078628540f, -0.100311279f, 0.572036743f, -1.144989014f, -0.572036743f,
	-0.100311279f, -0.078628540f, -0.031082151f, -0.007003784f, -0.003250122f,
	-0.000442505f, 0.000015259f, 0.000473022f, -0.003326416f, 0.007919312f,
	-0.030517576f, 0.084182739f, -0.090927124f, 0.600219727f, -1.144287109f,
	-0.543823242f, -0.108856201f, -0.073059082f, -0.031478882f, -0.006118774f,
	-0.003173828f, -0.000396729f, 0.000015259f, 0.000534058f, -0.003387451f,
	0.008865356f, -0.029785154f, 0.089706421f, -0.080688477f, 0.628295898f,
	-1.142211914f, -0.515609741f, -0.116577141f, -0.067520142f, -0.031738281f,
	-0.005294800f, -0.003082275f, -0.000366211f, 0.000015259f, 0.000579834f,
	-0.003433228f, 0.009841919f, -0.028884888f, 0.095169067f, -0.069595337f,
	0.656219482f, -1.138763428f, -0.487472534f, -0.123474121f, -0.061996460f,
	-0.031845093f, -0.004486084f, -0.002990723f, -0.000320435f, 0.000015259f,
	0.000625610f, -0.003463745f, 0.010848999f, -0.027801514f, 0.100540161f,
	-0.057617184f, 0.683914185f, -1.133926392f, -0.459472656f, -0.129577637f,
	-0.056533810f, -0.031814575f, -0.003723145f, -0.002899170f, -0.000289917f,
	0.000015259f, 0.000686646f, -0.003479004f, 0.011886597f, -0.026535034f,
	0.105819702f, -0.044784546f, 0.711318970f, -1.127746582f, -0.431655884f,
	-0.134887695f, -0.051132202f, -0.031661987f, -0.003005981f, -0.002792358f,
	-0.000259399f, 0.000015259f, 0.000747681f, -0.003479004f, 0.012939452f,
	-0.025085449f, 0.110946655f, -0.031082151f, 0.738372803f, -1.120223999f,
	-0.404083252f, -0.139450073f, -0.045837402f, -0.031387329f, -0.002334595f,
	-0.002685547f, -0.000244141f, 0.000030518f, 0.000808716f, -0.003463745f,
	0.014022826f, -0.023422241f, 0.115921021f, -0.016510010f, 0.765029907f,
	-1.111373901f, -0.376800537f, -0.143264771f, -0.040634155f, -0.031005858f,
	-0.001693726f, -0.002578735f, -0.000213623f, 0.000030518f, 0.000885010f,
	-0.003417969f, 0.015121460f, -0.021575928f, 0.120697014f, -0.001068115f,
	0.791213989f, -1.101211548f, -0.349868774f, -0.146362305f, -0.035552979f,
	-0.030532837f, -0.001098633f, -0.002456665f, -0.000198364f, 0.000030518f,
	0.000961304f, -0.003372192f, 0.016235352f, -0.019531250f, 0.125259399f,
	0.015228271f, 0.816864014f, -1.089782715f, -0.323318481f, -0.148773193f,
	-0.030609131f, -0.029937742f, -0.000549316f, -0.002349854f, -0.000167847f,
	0.000030518f, 0.001037598f, -0.003280640f, 0.017349243f, -0.017257690f,
	0.129562378f, 0.032379150f, 0.841949463f, -1.077117920f, -0.297210693f,
	-0.150497437f, -0.025817871f, -0.029281614f, -0.000030518f, -0.002243042f,
	-0.000152588f, 0.000045776f, 0.001113892f, -0.003173828f, 0.018463135f,
	-0.014801024f, 0.133590698f, 0.050354004f, 0.866363525f, -1.063217163f,
	-0.271591187f, -0.151596069f, -0.021179199f, -0.028533936f, 0.000442505f,
	-0.002120972f, -0.000137329f, 0.000045776f, 0.001205444f, -0.003051758f,
	0.019577026f, -0.012115479f, 0.137298584f, 0.069168091f, 0.890090942f,
	-1.048156738f, -0.246505737f, -0.152069092f, -0.016708374f, -0.027725220f,
	0.000869751f, -0.002014160f, -0.000122070f, 0.000061035f, 0.001296997f,
	-0.002883911f, 0.020690918f, -0.009231566f, 0.140670776f, 0.088775635f,
	0.913055420f, -1.031936646f, -0.221984863f, -0.151962280f, -0.012420653f,
	-0.026840210f, 0.001266479f, -0.001907349f, -0.000106812f, 0.000061035f,
	0.001388550f, -0.002700806f, 0.021789551f, -0.006134033f, 0.143676758f,
	0.109161377f, 0.935195923f, -1.014617920f, -0.198059082f, -0.151306152f,
	-0.008316040f, -0.025909424f, 0.001617432f, -0.001785278f, -0.000106812f,
	0.000076294f, 0.001480103f, -0.002487183f, 0.022857666f, -0.002822876f,
	0.146255493f, 0.130310059f, 0.956481934f, -0.996246338f, -0.174789429f,
	-0.150115967f, -0.004394531f, -0.024932859f, 0.001937866f, -0.001693726f,
	-0.000091553f, -0.001586914f, -0.023910521f, -0.148422241f, -0.976852417f,
	0.152206421f, 0.000686646f, -0.002227783f, 0.000076294f,
};
/* Synthesis window: one 32-sample granule -> 32 samples of 16-bit
 * signed PCM.  Same tap structure as windowB (512-entry circular
 * buffer, forward/special/backward coef passes) but emits the clipped
 * 16-bit value directly instead of an 8-bit byte. */
void CDecompressMpeg::window(float* vbuf, int vb_ptr, short* pcm)
{
	int i, j;
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 16;
	bx = (si + 32) & 511;
	coef = wincoef;
	/*-- first 16 --*/
	for (i = 0; i < 16; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {	/* 8 forward taps + 8 mirrored taps */
			sum += (*coef++) * vbuf[si];
			si = (si + 64) & 511;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		si++;
		bx--;
		tmp = (long) sum;
		if (tmp > 32767)	/* clip to 16-bit signed range */
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = (short) tmp;
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 64) & 511;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm++ = (short) tmp;
	/*-- last 15 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 15; i++) {
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 64) & 511;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = (short) tmp;
	}
}
/* Same windowing as window(), but writes every other 16-bit sample
 * (pcm += 2) so two calls can interleave left/right channels. */
void CDecompressMpeg::window_dual(float* vbuf, int vb_ptr, short* pcm)
{
	int i, j; /* dual window interleaves output */
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 16;
	bx = (si + 32) & 511;
	coef = wincoef;
	/*-- first 16 --*/
	for (i = 0; i < 16; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si = (si + 64) & 511;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		si++;
		bx--;
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = (short) tmp;
		pcm += 2;	/* skip the other channel's slot */
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 64) & 511;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm = (short) tmp;
	pcm += 2;
	/*-- last 15 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 15; i++) {
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 64) & 511;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 64) & 511;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = (short) tmp;
		pcm += 2;
	}
}
/* Half-rate synthesis window: 16 samples of 16-bit PCM per granule
 * over a 256-entry circular buffer.  si/bx are unsigned char so index
 * arithmetic wraps mod 256; coef skips 16 entries per output to
 * subsample the window table. */
void CDecompressMpeg::window16(float* vbuf, int vb_ptr, short* pcm)
{
	int i, j;
	unsigned char si, bx;	/* 8-bit wraparound == mod-256 indexing */
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 8;
	bx = si + 16;
	coef = wincoef;
	/*-- first 8 --*/
	for (i = 0; i < 8; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si += 32;
			sum -= (*coef++) * vbuf[bx];
			bx += 32;
		}
		si++;
		bx--;
		coef += 16;	/* subsample the window table */
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = (short) tmp;
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx += 32;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm++ = (short) tmp;
	/*-- last 7 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 7; i++) {
		coef -= 16;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si += 32;
			sum += (*coef--) * vbuf[bx];
			bx += 32;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = (short) tmp;
	}
}
/* Half-rate variant of window_dual: 16 interleaved 16-bit samples per
 * granule (pcm += 2), mod-256 indexing via unsigned char si/bx. */
void CDecompressMpeg::window16_dual(float* vbuf, int vb_ptr, short* pcm)
{
	int i, j;
	unsigned char si, bx;	/* 8-bit wraparound == mod-256 indexing */
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 8;
	bx = si + 16;
	coef = wincoef;
	/*-- first 8 --*/
	for (i = 0; i < 8; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si += 32;
			sum -= (*coef++) * vbuf[bx];
			bx += 32;
		}
		si++;
		bx--;
		coef += 16;
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = (short) tmp;
		pcm += 2;	/* interleave with the other channel */
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx += 32;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm = (short) tmp;
	pcm += 2;
	/*-- last 7 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 7; i++) {
		coef -= 16;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si += 32;
			sum += (*coef--) * vbuf[bx];
			bx += 32;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = (short) tmp;
		pcm += 2;
	}
}
/* Quarter-rate synthesis window: 8 samples of 16-bit PCM per granule
 * over a 128-entry circular buffer (& 127); coef skips 48 entries per
 * output to subsample the window table. */
void CDecompressMpeg::window8(float* vbuf, int vb_ptr, short* pcm)
{
	int i, j;
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 4;
	bx = (si + 8) & 127;
	coef = wincoef;
	/*-- first 4 --*/
	for (i = 0; i < 4; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si = (si + 16) & 127;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		si++;
		bx--;
		coef += 48;	/* subsample the window table */
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = (short) tmp;
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 16) & 127;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm++ = (short) tmp;
	/*-- last 3 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 3; i++) {
		coef -= 48;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 16) & 127;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm++ = (short) tmp;
	}
}
/* Quarter-rate variant of window_dual: 8 interleaved 16-bit samples
 * per granule (pcm += 2) over a 128-entry circular buffer. */
void CDecompressMpeg::window8_dual(float* vbuf, int vb_ptr, short* pcm)
{
	int i, j;
	int si, bx;
	float* coef;
	float sum;
	long tmp;
	si = vb_ptr + 4;
	bx = (si + 8) & 127;
	coef = wincoef;
	/*-- first 4 --*/
	for (i = 0; i < 4; i++) {
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef++) * vbuf[si];
			si = (si + 16) & 127;
			sum -= (*coef++) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		si++;
		bx--;
		coef += 48;
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = (short) tmp;
		pcm += 2;	/* interleave with the other channel */
	}
	/*-- special case --*/
	sum = 0.0F;
	for (j = 0; j < 8; j++) {
		sum += (*coef++) * vbuf[bx];
		bx = (bx + 16) & 127;
	}
	tmp = (long) sum;
	if (tmp > 32767)
		tmp = 32767;
	else if (tmp < -32768)
		tmp = -32768;
	*pcm = (short) tmp;
	pcm += 2;
	/*-- last 3 --*/
	coef = wincoef + 255; /* back pass through coefs */
	for (i = 0; i < 3; i++) {
		coef -= 48;
		si--;
		bx++;
		sum = 0.0F;
		for (j = 0; j < 8; j++) {
			sum += (*coef--) * vbuf[si];
			si = (si + 16) & 127;
			sum += (*coef--) * vbuf[bx];
			bx = (bx + 16) & 127;
		}
		tmp = (long) sum;
		if (tmp > 32767)
			tmp = 32767;
		else if (tmp < -32768)
			tmp = -32768;
		*pcm = (short) tmp;
		pcm += 2;
	}
}
/* Reset the synthesis filterbank: zero both channel window buffers
 * and rewind both circular-buffer positions. */
void CDecompressMpeg::sbt_init()
{
	for (int i = 0; i < 512; i++)
		vbuf[i] = vbuf2[i] = 0.0F;
	vb_ptr = 0;
	vb2_ptr = 0;
}
/* Subband transform, 16-bit mono: n granules of 32 samples each. */
void CDecompressMpeg::sbt_mono(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct32(sample, vbuf + vb_ptr);
		window(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
/* Subband transform, 16-bit stereo: both channels transformed per
 * granule and interleaved into the output. */
void CDecompressMpeg::sbt_dual(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct32_dual(sample, vbuf + vb_ptr);
		fdct32_dual(sample + 1, vbuf2 + vb_ptr);
		window_dual(vbuf, vb_ptr, out);
		window_dual(vbuf2, vb_ptr, out + 1);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 64;
	}
}
/* Subband transform, 16-bit: interleaved stereo input mixed down to
 * a single mono output channel. */
void CDecompressMpeg::sbt_dual_mono(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct32_dual_mono(sample, vbuf + vb_ptr);
		window(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
/* Subband transform, 16-bit: keep only the left channel of
 * interleaved stereo input. */
void CDecompressMpeg::sbt_dual_left(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct32_dual(sample, vbuf + vb_ptr);
		window(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
// Full-rate synthesis of only the RIGHT channel of an interleaved stereo
// stream, written as mono PCM.
void CDecompressMpeg::sbt_dual_right(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	++sample;	/* advance to the right-channel samples */
	while (n-- > 0) {
		fdct32_dual(sample, vbuf + vb_ptr);
		window(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 64;
		out += 32;
	}
}
// Half-rate subband synthesis, single channel: 16 PCM samples per granule
// (256-entry ring buffer).
void CDecompressMpeg::sbt16_mono(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct16(sample, vbuf + vb_ptr);
		window16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
// Half-rate subband synthesis, true stereo, interleaved input and output.
void CDecompressMpeg::sbt16_dual(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct16_dual(sample, vbuf + vb_ptr);
		fdct16_dual(sample + 1, vbuf2 + vb_ptr);
		window16_dual(vbuf, vb_ptr, out);
		window16_dual(vbuf2, vb_ptr, out + 1);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 32;
	}
}
// Half-rate synthesis of an interleaved stereo stream folded to mono.
void CDecompressMpeg::sbt16_dual_mono(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct16_dual_mono(sample, vbuf + vb_ptr);
		window16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
// Half-rate synthesis of only the LEFT channel of a stereo stream.
void CDecompressMpeg::sbt16_dual_left(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct16_dual(sample, vbuf + vb_ptr);
		window16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
// Half-rate synthesis of only the RIGHT channel of a stereo stream.
void CDecompressMpeg::sbt16_dual_right(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	++sample;	/* right-channel samples are the odd entries */
	while (n-- > 0) {
		fdct16_dual(sample, vbuf + vb_ptr);
		window16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 64;
		out += 16;
	}
}
// Quarter-rate subband synthesis, single channel: 8 PCM samples per
// granule (128-entry ring buffer).
void CDecompressMpeg::sbt8_mono(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct8(sample, vbuf + vb_ptr);
		window8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
// Quarter-rate subband synthesis, true stereo, interleaved in and out.
void CDecompressMpeg::sbt8_dual(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct8_dual(sample, vbuf + vb_ptr);
		fdct8_dual(sample + 1, vbuf2 + vb_ptr);
		window8_dual(vbuf, vb_ptr, out);
		window8_dual(vbuf2, vb_ptr, out + 1);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 16;
	}
}
// Quarter-rate synthesis of an interleaved stereo stream folded to mono.
void CDecompressMpeg::sbt8_dual_mono(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct8_dual_mono(sample, vbuf + vb_ptr);
		window8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
// Quarter-rate synthesis of only the LEFT channel of a stereo stream.
void CDecompressMpeg::sbt8_dual_left(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	while (n-- > 0) {
		fdct8_dual(sample, vbuf + vb_ptr);
		window8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
// Quarter-rate synthesis of only the RIGHT channel of a stereo stream.
void CDecompressMpeg::sbt8_dual_right(float* sample, void* in_pcm, int n)
{
	short* out = (short*) in_pcm;
	++sample;	/* right-channel samples are the odd entries */
	while (n-- > 0) {
		fdct8_dual(sample, vbuf + vb_ptr);
		window8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 64;
		out += 8;
	}
}
// Layer-3 full-rate synthesis, single channel: always 18 granules of 32
// samples, input stride 32 floats per granule.
void CDecompressMpeg::sbt_mono_L3(float* sample, void* in_pcm, int ch)
{
	short* out = (short*) in_pcm;
	(void) ch;	/* single channel: the channel index is ignored */
	for (int g = 0; g < 18; ++g) {
		fdct32(sample, vbuf + vb_ptr);
		window(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 32) & 511;
		sample += 32;
		out += 32;
	}
}
// Layer-3 full-rate synthesis of one stereo channel into interleaved PCM.
// ch == 0 uses the left-channel state (vbuf/vb_ptr), otherwise the right
// state (vbuf2/vb2_ptr) with output offset by one sample.
void CDecompressMpeg::sbt_dual_L3(float* sample, void* in_pcm, int ch)
{
	short* out = (short*) in_pcm;
	if (ch == 0) {
		for (int g = 0; g < 18; ++g) {
			fdct32(sample, vbuf + vb_ptr);
			window_dual(vbuf, vb_ptr, out);
			vb_ptr = (vb_ptr - 32) & 511;
			sample += 32;
			out += 64;
		}
	} else {
		for (int g = 0; g < 18; ++g) {
			fdct32(sample, vbuf2 + vb2_ptr);
			window_dual(vbuf2, vb2_ptr, out + 1);
			vb2_ptr = (vb2_ptr - 32) & 511;
			sample += 32;
			out += 64;
		}
	}
}
// Layer-3 half-rate synthesis, single channel (18 granules).
void CDecompressMpeg::sbt16_mono_L3(float* sample, void* in_pcm, int ch)
{
	short* out = (short*) in_pcm;
	(void) ch;	/* single channel: the channel index is ignored */
	for (int g = 0; g < 18; ++g) {
		fdct16(sample, vbuf + vb_ptr);
		window16(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 16) & 255;
		sample += 32;
		out += 16;
	}
}
// Layer-3 half-rate synthesis of one stereo channel into interleaved PCM.
void CDecompressMpeg::sbt16_dual_L3(float* sample, void* in_pcm, int ch)
{
	short* out = (short*) in_pcm;
	if (ch == 0) {
		for (int g = 0; g < 18; ++g) {
			fdct16(sample, vbuf + vb_ptr);
			window16_dual(vbuf, vb_ptr, out);
			vb_ptr = (vb_ptr - 16) & 255;
			sample += 32;
			out += 32;
		}
	} else {
		for (int g = 0; g < 18; ++g) {
			fdct16(sample, vbuf2 + vb2_ptr);
			window16_dual(vbuf2, vb2_ptr, out + 1);
			vb2_ptr = (vb2_ptr - 16) & 255;
			sample += 32;
			out += 32;
		}
	}
}
// Layer-3 quarter-rate synthesis, single channel (18 granules).
void CDecompressMpeg::sbt8_mono_L3(float* sample, void* in_pcm, int ch)
{
	short* out = (short*) in_pcm;
	(void) ch;	/* single channel: the channel index is ignored */
	for (int g = 0; g < 18; ++g) {
		fdct8(sample, vbuf + vb_ptr);
		window8(vbuf, vb_ptr, out);
		vb_ptr = (vb_ptr - 8) & 127;
		sample += 32;
		out += 8;
	}
}
// Layer-3 quarter-rate synthesis of one stereo channel into interleaved PCM.
void CDecompressMpeg::sbt8_dual_L3(float* sample, void* in_pcm, int ch)
{
	short* out = (short*) in_pcm;
	if (ch == 0) {
		for (int g = 0; g < 18; ++g) {
			fdct8(sample, vbuf + vb_ptr);
			window8_dual(vbuf, vb_ptr, out);
			vb_ptr = (vb_ptr - 8) & 127;
			sample += 32;
			out += 16;
		}
	} else {
		for (int g = 0; g < 18; ++g) {
			fdct8(sample, vbuf2 + vb2_ptr);
			window8_dual(vbuf2, vb2_ptr, out + 1);
			vb2_ptr = (vb2_ptr - 8) & 127;
			sample += 32;
			out += 16;
		}
	}
}
// Bitrate table, kbit/s, indexed [version-1][layer-1][br_index].
// Index 0 is "free format", index 15 is invalid (both stored as 0).
int CDecompressMpeg::br_tbl[3][3][16] = {
	{// MPEG-1
		// Layer1
		{ 0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 0 },
		// Layer2
		{ 0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 0 },
		// Layer3
		{ 0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 0 },
	}, {// MPEG-2
		// Layer1
		{ 0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0 },
		// Layer2
		{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
		// Layer3
		{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
	}, {// MPEG-2.5
		// Layer1 (not available)
		{ 0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0 },
		// Layer2 (not available)
		{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
		// Layer3
		{ 0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0 },
	},
};
// Sample-rate table, Hz, indexed [version-1][fr_index]; index 3 is invalid.
int CDecompressMpeg::fr_tbl[3][4] = {
	{ 44100, 48000, 32000, 0 }, // MPEG-1
	{ 22050, 24000, 16000, 0 }, // MPEG-2
	{ 11025, 12000, 8000, 0 }, // MPEG-2.5
};
// One-time decoder initialization: set default decode options (no sample
// reduction, no channel conversion, 24 kHz frequency limit) and build the
// per-layer lookup tables.
void CDecompressMpeg::mp3DecodeInit()
{
	m_option.reduction = 0;
	m_option.convert = 0;
	m_option.freqLimit = 24000;
	L1table_init();
	L2table_init();
	L3table_init();
}
// Parse a 4-byte MPEG audio frame header (ISO 11172-3 layout) into *h.
// Returns 1 on success; returns 0 and sets m_last_error on bad sync or an
// invalid field. On success h->version is 1/2/3 (MPEG-1/2/2.5), h->layer is
// remapped to 1..3, and h->error_prot is inverted so 1 means "CRC present".
int CDecompressMpeg::mp3GetHeader(BYTE* buf, MPEG_HEADER* h)
{
	/* raw bit fields straight from the header bytes */
	h->version = (buf[1] & 0x08) >> 3;
	h->layer = (buf[1] & 0x06) >> 1;
	h->error_prot = (buf[1] & 0x01);
	h->br_index = (buf[2] & 0xf0) >> 4;
	h->fr_index = (buf[2] & 0x0c) >> 2;
	h->padding = (buf[2] & 0x02) >> 1;
	h->extension = (buf[2] & 0x01);
	h->mode = (buf[3] & 0xc0) >> 6;
	h->mode_ext = (buf[3] & 0x30) >> 4;
	h->copyright = (buf[3] & 0x08) >> 3;
	h->original = (buf[3] & 0x04) >> 2;
	h->emphasis = (buf[3] & 0x03);
	if (buf[0] != 0xFF) {
		//sync error
		m_last_error = MP3_ERROR_INVALID_SYNC;
		return 0;
	}
	if ((buf[1] & 0xF0) == 0xF0) //MPEG-1, MPEG-2
		h->version = (h->version) ? 1 : 2;
	else if ((buf[1] & 0xF0) == 0xE0) //MPEG-2.5
		h->version = 3;
	else {
		m_last_error = MP3_ERROR_INVALID_SYNC;
		return 0;
	}
	/* reject reserved sample-rate index, free-format/invalid bitrate,
	   and the reserved layer code */
	if (h->fr_index >= 3 ||
		h->br_index == 0 ||
		h->br_index >= 15 ||
		h->layer == 0 ||
		h->layer >= 4) {
		m_last_error = MP3_ERROR_INVALID_HEADER;
		return 0;
	}
	h->layer = 4 - h->layer;
	h->error_prot = (h->error_prot) ? 0 : 1;
	return 1;
}
// Parse a frame header and derive per-frame quantities: bitrate in bit/s,
// frame size in bytes, samples per frame, and decoded output size in bytes.
// NOTE(review): frame sizes are computed with the member m_frequency, so
// mp3GetDecodeInfo(..., decFlag=1) must have run first — confirm callers.
bool CDecompressMpeg::mp3GetHeaderInfo(BYTE* buffer, MPEG_HEADER_INFO* info)
{
	int ch, ver;
	MPEG_HEADER* h =& info->header;
	// extract the raw header fields
	if (!mp3GetHeader(buffer, h))
		return false;
	// derive the per-frame quantities
	info->curBitRate = br_tbl[h->version - 1][h->layer - 1][h->br_index] * 1000;
	switch (h->layer) {
		case 1:
			//layer1: slots are 4 bytes wide
			info->curFrameSize = (12 * info->curBitRate / m_frequency + h->padding) * 4;
			break;
		case 2:
			//layer2
			info->curFrameSize = 144 * info->curBitRate /
								 m_frequency +
								 h->padding;
			break;
		case 3:
			//layer3: MPEG-2/2.5 frames carry half as many samples
			if (h->version == 1)
				info->curFrameSize = 144 * info->curBitRate /
									 m_frequency +
									 h->padding;
			else
				info->curFrameSize = (144 * info->curBitRate / m_frequency) /
									 2 +
									 h->padding;
			break;
	}
	ch = (h->mode == 3) ? 1 : 2;		/* mode 3 = single channel */
	ver = (h->version == 1) ? 1 : 2;
	info->samplesInFrame = (1152 >> m_option.reduction) / ver;
	info->outputSize = info->samplesInFrame * 2 * ch;	/* 16-bit PCM bytes */
	return true;
}
// Return the last MP3_ERROR_* code recorded by a failed decoder call.
int CDecompressMpeg::mp3GetLastError()
{
	return m_last_error;
}
// Scan buf for the next frame header that matches the stream parameters
// captured at open time (_layer/_version/_br_index/_fr_index/_mode).
// On success stores the byte offset in *sync and returns 1; returns 0 with
// m_last_error = MP3_ERROR_OUT_OF_BUFFER when no match fits in the buffer.
int CDecompressMpeg::mp3FindSync(BYTE* buf, int size, int* sync)
{
	int i;
	MPEG_HEADER h;
	*sync = 0;
	size -= 3;	/* a header needs 4 readable bytes */
	if (size <= 0) {
		m_last_error = MP3_ERROR_OUT_OF_BUFFER;
		return 0;
	}
	// scan for a matching header
	for (i = 0; i < size; i++) {
		if (buf[i] == 0xFF) {
			if (mp3GetHeader(buf + i, & h)) {
				if ((h.layer == _layer) &&
					(h.version == _version) &&
					(h.br_index == _br_index) &&
					(h.fr_index == _fr_index) &&
					(h.mode == _mode))
					break;
			}
		}
	}
	if (i == size) {
		m_last_error = MP3_ERROR_OUT_OF_BUFFER;
		return 0;
	}
	*sync = i;
	return 1;
}
// Copy the current decode options into *option.
void CDecompressMpeg::mp3GetDecodeOption(MPEG_DECODE_OPTION* option)
{
	*option = m_option;
}
// Replace the decode options; always succeeds (returns 1).
int CDecompressMpeg::mp3SetDecodeOption(MPEG_DECODE_OPTION* option)
{
	m_option = *option;
	return 1;
}
/*
//-----------------------------------------------------------------------------
// Установка эквалайзера
// value - указатель на параметры эквалайзера
//-----------------------------------------------------------------------------
int CDecompressMpeg::mp3SetEqualizer(int* value)
{
int i;
if (value == (void*)0) {
m_enableEQ = 0;
return 1;
}
m_enableEQ = 1;
//60, 170, 310, 600, 1K, 3K
for (i = 0; i < 6; i ++) {
m_equalizer[i] = (float)pow(10,(double)value[i]/200);
}
//6K
m_equalizer[6] = (float)pow(10,(double)value[6]/200);
m_equalizer[7] = m_equalizer[6];
//12K
m_equalizer[8] = (float)pow(10,(double)value[7]/200);
m_equalizer[9] = m_equalizer[8];
m_equalizer[10] = m_equalizer[8];
m_equalizer[11] = m_equalizer[8];
//14K
m_equalizer[12] = (float)pow(10,(double)value[8]/200);
m_equalizer[13] = m_equalizer[12];
m_equalizer[14] = m_equalizer[12];
m_equalizer[15] = m_equalizer[12];
m_equalizer[16] = m_equalizer[12];
m_equalizer[17] = m_equalizer[12];
m_equalizer[18] = m_equalizer[12];
m_equalizer[19] = m_equalizer[12];
//16K
m_equalizer[20] = (float)pow(10,(double)value[9]/200);
m_equalizer[21] = m_equalizer[20];
m_equalizer[22] = m_equalizer[20];
m_equalizer[23] = m_equalizer[20];
m_equalizer[24] = m_equalizer[20];
m_equalizer[25] = m_equalizer[20];
m_equalizer[26] = m_equalizer[20];
m_equalizer[27] = m_equalizer[20];
m_equalizer[28] = m_equalizer[20];
m_equalizer[29] = m_equalizer[20];
m_equalizer[30] = m_equalizer[20];
m_equalizer[31] = m_equalizer[20];
return 1;
}
*/
// Bit flags in the Xing VBR header's "flags" word; each bit announces an
// optional field that follows (frame count, byte count, seek TOC, scale).
#define VBR_FRAMES_FLAG 0x0001
#define VBR_BYTES_FLAG 0x0002
#define VBR_TOC_FLAG 0x0004
#define VBR_SCALE_FLAG 0x0008
// Read a 32-bit big-endian integer from the first four bytes of buf.
int CDecompressMpeg::extractInt4(BYTE* buf)
{
	int v = buf[0];
	v = (v << 8) | buf[1];
	v = (v << 8) | buf[2];
	v = (v << 8) | buf[3];
	return v;
}
//-----------------------------------------------------------------------------
// извленение заголовка и важных данных
// mpeg - указатель на буфер с данными
// size - размер буфера с данными
// info - указатель на структуру куда поместить расширенные данные
// decFlag - ? помоему использовать настройки частоты из файла
//-----------------------------------------------------------------------------
// Inspect the first frame (and an optional Xing/VBRI header) and fill
// *info with stream-level facts: bitrate, sample rate, frame count (VBR),
// min/max frame sizes, channels, bits per sample and output buffer size.
// decFlag == 1 additionally latches m_frequency / m_pcm_size for decoding.
// Returns 1 on success, 0 on error (m_last_error set).
int CDecompressMpeg::mp3GetDecodeInfo(BYTE* mpeg, int size,
									  MPEG_DECODE_INFO* info, int decFlag)
{
	MPEG_HEADER* h =& info->header;
	byte* p = mpeg;
	int vbr;
	DWORD minBitRate, maxBitRate;
	DWORD i, j, flags;
	//int bitRate;
	//int frame_size;
	//	if (size < 156) {//max vbr header size
	//		m_last_error = MP3_ERROR_OUT_OF_BUFFER;
	//		return 0;
	//	}
	if (!mp3GetHeader(p, h)) {
		return 0;
	}
	//check VBR Header: it sits right after the side information
	p += 4;//skip mpeg header
	if (h->error_prot)
		p += 2;//skip crc
	if (h->layer == 3) {
		//skip side info (size depends on version and channel mode)
		if (h->version == 1) {
			//MPEG-1
			if (h->mode != 3)
				p += 32;
			else
				p += 17;
		} else {
			//MPEG-2, MPEG-2.5
			if (h->mode != 3)
				p += 17;
			else
				p += 9;
		}
	}
	info->bitRate = br_tbl[h->version - 1][h->layer - 1][h->br_index] * 1000;
	info->frequency = fr_tbl[h->version - 1][h->fr_index];
	if (memcmp(p, "Xing", 4) == 0) {
		//Xing VBR header
		p += 4;
		flags = extractInt4(p);
		p += 4;
		if (!(flags & (VBR_FRAMES_FLAG | VBR_BYTES_FLAG))) {
			m_last_error = MP3_ERROR_INVALID_HEADER;
			return 0;
		}
		info->frames = extractInt4(p);
		p += 4;
		info->dataSize = extractInt4(p);
		p += 4;
		if (flags & VBR_TOC_FLAG)
			p += 100;
		if (flags & VBR_SCALE_FLAG)
			p += 4;
		/*
		//•WЏЂVBR‘О‰ћ
		if ( p[0] == mpeg[0] && p[1] == mpeg[1] ) {
			info->skipSize = (int)(p - mpeg);
		} else {
			bitRate = br_tbl[h->version-1][h->layer-1][h->br_index] * 1000;
			switch (h->layer) {
			case 1://layer1
				frame_size = (12 * bitRate / fr_tbl[h->version-1][h->fr_index]) * 4;//one slot is 4 bytes long
				if (h->padding) frame_size += 4;
				break;
			case 2://layer2
				frame_size = 144 * bitRate / fr_tbl[h->version-1][h->fr_index];
				if (h->padding) frame_size ++;
				break;
			case 3://layer3
				frame_size = 144 * bitRate / fr_tbl[h->version-1][h->fr_index];
				if (h->version != 1) //MPEG-2, MPEG-2.5
					frame_size /= 2;
				if (h->padding) frame_size ++;
				break;
			}
			info->skipSize = (int)(frame_size);
		}
		info->bitRate = 0;
		*/
		vbr = 1;
		/* bound the frame sizes by the smallest/largest legal bitrate */
		minBitRate = 0xffffffff;
		maxBitRate = 0;
		for (i = 1; i < 15; i ++) {
			j = br_tbl[h->version - 1][h->layer - 1][i] * 1000;
			if (j < minBitRate)
				minBitRate = j;
			if (j > maxBitRate)
				maxBitRate = j;
		}
	} else if (memcmp(p, "VBRI", 4) == 0) {
		//VBRI (Fraunhofer) VBR header
		p += 10;
		info->dataSize = extractInt4(p);
		p += 4;
		info->frames = extractInt4(p);
		p += 4;
		vbr = 1;
		minBitRate = 0xffffffff;
		maxBitRate = 0;
		for (i = 1; i < 15; i ++) {
			j = br_tbl[h->version - 1][h->layer - 1][i] * 1000;
			if (j < minBitRate)
				minBitRate = j;
			if (j > maxBitRate)
				maxBitRate = j;
		}
	} else {
		//not VBR: constant bitrate stream
		vbr = 0;
		info->frames = 0;
		//info->skipSize = 0;
		info->dataSize = 0;
		//info->bitRate = br_tbl[h->version-1][h->layer-1][h->br_index] * 1000;
	}
	//	info->frequency = fr_tbl[h->version-1][h->fr_index];
	//	info->msPerFrame = ms_p_f_table[h->layer-1][h->fr_index];
	//	if (h->version == 3) info->msPerFrame *= 2;
	/* per-layer output size and frame-size bounds; for VBR streams
	   skipSize is the size of the header frame itself */
	switch (h->layer) {
	case 1:
		//layer1
		info->outputSize = 384 >> m_option.reduction;
		//if (info->bitRate) {
		if (!vbr) {
			info->skipSize = 0;
			info->minInputSize = (12 * info->bitRate / info->frequency) * 4;//one slot is 4 bytes long
			info->maxInputSize = info->minInputSize + 4;
		} else {
			info->skipSize = (12 * info->bitRate /
							  info->frequency +
							  h->padding) * 4;
			info->minInputSize = (12 * minBitRate / info->frequency) * 4;
			info->maxInputSize = (12 * maxBitRate / info->frequency) * 4 + 4;
		}
		break;
	case 2:
		//layer2
		info->outputSize = 1152 >> m_option.reduction;
		//if (info->bitRate) {
		if (!vbr) {
			info->skipSize = 0;
			info->minInputSize = 144 * info->bitRate / info->frequency;
			info->maxInputSize = info->minInputSize + 1;
		} else {
			info->skipSize = 144 * info->bitRate /
							 info->frequency +
							 h->padding;
			info->minInputSize = 144 * minBitRate / info->frequency;
			info->maxInputSize = 144 * maxBitRate / info->frequency + 1;
		}
		break;
	case 3:
		//layer3: MPEG-2/2.5 (i == 2) halves samples and frame size
		i = (h->version == 1) ? 1 : 2;
		//info->outputSize = 1152 >> m_option.reduction;
		info->outputSize = (1152 >> m_option.reduction) / i;
		//if (info->bitRate) {
		if (!vbr) {
			info->skipSize = 0;
			info->minInputSize = 144 * info->bitRate / info->frequency / i;
			info->maxInputSize = info->minInputSize + 1;
		} else {
			info->skipSize = 144 * info->bitRate /
							 info->frequency /
							 i +
							 h->padding;
			info->minInputSize = 144 * minBitRate / info->frequency / i;
			info->maxInputSize = 144 * maxBitRate / info->frequency / i + 1;
		}
		break;
	/*
		if (h->version != 1) {
			//MPEG-2, MPEG-2.5
			info->outputSize /= 2;
			info->minInputSize /= 2;
			info->maxInputSize /= 2;
		}
		info->maxInputSize ++;
		break;
	*/
	}
	if ((h->mode == 3) || (m_option.convert & 3))
		info->channels = 1;
	else
		info->channels = 2;
	if (m_option.convert & 8) {
		//not available
		info->bitsPerSample = 8;
		info->outputSize *= info->channels;
	} else {
		info->bitsPerSample = 16;
		info->outputSize *= info->channels * 2;
	}
	if (decFlag == 1) {
		/* latch stream parameters used by the frame decoder */
		m_frequency = info->frequency;
		m_pcm_size = info->outputSize;
	}
	info->frequency >>= m_option.reduction;
	info->HeadBitRate = info->bitRate;
	if (vbr)
		info->bitRate = 0;	/* callers use bitRate == 0 as the VBR marker */
	return 1;
}
// Prime the decoder for a new stream: parse the first frame, latch the
// stream parameters (mp3GetDecodeInfo with decFlag=1), reset the subband
// synthesis state and run the layer-specific start-up.
// Returns 1 on success, 0 on a bad first frame.
int CDecompressMpeg::mp3DecodeStart(BYTE* mpeg, int size)
{
	MPEG_DECODE_INFO info;
	MPEG_HEADER* h = &info.header;
	if (!mp3GetDecodeInfo(mpeg, size, &info, 1))
		return 0;
	sbt_init();
	// dispatch to the layer-specific initializer
	if (h->layer == 1)
		L1decode_start(h);
	else if (h->layer == 2)
		L2decode_start(h);
	else if (h->layer == 3)
		L3decode_start(h);
	return 1;
}
// Decode exactly one frame: parse its header, compute the frame size from
// the latched m_frequency, then run the layer-specific decoder.
// On success param->inputSize/outputSize are set to the bytes consumed and
// produced. Returns 1 on success, 0 on error (m_last_error set).
int CDecompressMpeg::mp3DecodeFrame(MPEG_DECODE_PARAM* param)
{
	MPEG_HEADER* h =& param->header;
	// need at least a full 4-byte header
	if (param->inputSize <= 4) {
		m_last_error = MP3_ERROR_OUT_OF_BUFFER;
		return 0;
	}
	// parse the frame header
	if (!mp3GetHeader((unsigned char *) param->inputBuf, h)) {
		return 0;
	}
	// compute this frame's size in bytes
	param->bitRate = br_tbl[h->version - 1][h->layer - 1][h->br_index] * 1000;
	switch (h->layer) {
	//layer1: slots are 4 bytes wide
	case 1:
		m_frame_size = (12 * param->bitRate / m_frequency + h->padding) * 4;
		break;
	//layer2
	case 2:
		m_frame_size = 144 * param->bitRate / m_frequency + h->padding;
		break;
	//layer3: MPEG-2/2.5 frames are half size
	case 3:
		if (h->version == 1)
			m_frame_size = 144 * param->bitRate / m_frequency + h->padding;
		else
			m_frame_size = (144 * param->bitRate / m_frequency) /
						   2 +
						   h->padding;
		break;
	}
	// the whole frame must be present in the input buffer
	if (param->inputSize < m_frame_size) {
		m_last_error = MP3_ERROR_OUT_OF_BUFFER;
		return 0;
	}
	// dispatch to the layer-specific frame decoder
	switch (h->layer) {
	case 1:
		L1decode_frame(h,
					   (unsigned char *) param->inputBuf,
					   (unsigned char *) param->outputBuf);
		break;
	case 2:
		L2decode_frame(h,
					   (unsigned char *) param->inputBuf,
					   (unsigned char *) param->outputBuf);
		break;
	case 3:
		L3decode_frame(h,
					   (unsigned char *) param->inputBuf,
					   (unsigned char *) param->outputBuf);
		break;
	}
	//!!!todo m_frame_proc(h, (unsigned char*)param->inputBuf, (unsigned char *)param->outputBuf);
	// report consumed/produced byte counts back to the caller
	param->inputSize = m_frame_size;
	param->outputSize = m_pcm_size;
	return 1;
}
// Reset decode state after a seek: clear the synthesis window buffers and
// the layer-3 decoder state.
void CDecompressMpeg::mp3Reset(void)
{
	sbt_init();
	L3decode_reset();
}
//-----------------------------------------------------------------------------
// Seek the decoder to the given frame index.
// To warm up the bit reservoir / overlap state, seeking lands a few frames
// ("back") earlier and decodes forward, discarding the output. For CBR the
// byte offset is estimated from _bitPerFrame and then resynced with
// mp3FindSync; for VBR the prebuilt _vbrFrameOffTable is used.
// Returns 1 on success, 0 on any I/O or sync failure.
//-----------------------------------------------------------------------------
int CDecompressMpeg::mp3seek(DWORD frame)
{
	// working state
	DWORD cur = 0;
	DWORD back = 3;	/* warm-up frames decoded before the target */
	int off = 0;
	DWORD need_frame_offset = 0;
	// nothing to do when we are already positioned on the frame
	if (_curFrame != frame) {
		if (_curFrame != (frame - 1)) {
			// step back a few frames for decoder warm-up
			if (frame > back)
				frame -= back;
			else {
				back = frame;
				frame = 0;
			}
			if (!_vbr) {
				// approximate byte position of the frame (CBR estimate)
				need_frame_offset = (DWORD)
									floor(((double) frame * _bitPerFrame) /
										  8);
				// resync on a real frame boundary
				while (1) {
					// position the read cursor
					if (SourceData->seek(need_frame_offset, 0) !=
						need_frame_offset)
						return 0;
					// end of file?
					if (SourceData->eof())
						return 0;
					// fetch a window to scan for a header
					if (SourceData->peek(_frameBuffer, _minFrameSize) !=
						_minFrameSize)
						return 0;
					// scan; on failure slide the window (minus header slack)
					if (!mp3FindSync(_frameBuffer, _minFrameSize, & off)) {
						need_frame_offset += (_minFrameSize - 3);
					} else {
						need_frame_offset += off;
						break;
					}
				};
			} else {
				need_frame_offset = _vbrFrameOffTable[frame];
			}
			if (SourceData->seek(need_frame_offset, 0) != need_frame_offset)
				return 0;
			mp3Reset();
			// clear the layer-3 sample workspace
			for (int ch = 0; ch < 2; ch++) {
				for (int gr = 0; gr < 2; gr++) {
					for (int sam = 0; sam < 576; sam++) {
						m_sample[ch][gr][sam].s = 0;
						m_sample[ch][gr][sam].x = 0;
					}
				}
			}
			// decode the warm-up frames, discarding their PCM
			for (cur = 0; cur < back; cur++) {
				SourceData->peek(_frameBuffer, 4);
				if (!mp3GetHeaderInfo(_frameBuffer, & _mpegHI))
					return 0;
				_curFrameSize = _mpegHI.curFrameSize;
				if (SourceData->read(_frameBuffer, _curFrameSize) !=
					_curFrameSize)
					return 0;
				_mpegDP.header = _mpegHI.header;
				_mpegDP.bitRate = _mpegHI.curBitRate;
				_mpegDP.inputBuf = _frameBuffer;
				_mpegDP.inputSize = _mpegHI.curFrameSize;
				_mpegDP.outputBuf = _sampleBuffer;
				_mpegDP.outputSize = _mpegHI.outputSize;
				// decode one frame
				if (!mp3DecodeFrame(&_mpegDP))
					return 0;
			}
		}
	}
	return 1;
}
//-----------------------------------------------------------------------------
// Decoder constructor.
// in  : a    - the compressed source data
// out : flag - set true only when the file was recognised and the first
//              frame decoded; pcm_format is filled for the playback device.
// Builds the frame-offset table, allocates the frame buffer, decodes the
// first frame and publishes the PCM format.
//-----------------------------------------------------------------------------
CDecompressMpeg::CDecompressMpeg(WAVEFORMATEX* pcm_format, bool& flag,
								 CAbstractSoundFile* a)
	: CAbstractDecompressor(pcm_format, flag, a)
{
	DWORD cur;
	DWORD pos;
	MPEG_HEADER_INFO info;
	BYTE head[156];
	// file not recognised yet
	flag = false;
	// initialize the decoder tables
	mp3DecodeInit();
	// initialize decoder working state
	m_cs_factorL1 = m_cs_factor[0];
	//	m_enableEQ = 0;
	memset(&m_side_info, 0, sizeof(SIDE_INFO));
	memset(&m_scale_fac, 0, sizeof(SCALE_FACTOR) * 4);
	memset(&m_cb_info, 0, sizeof(CB_INFO) * 4);
	memset(&m_nsamp, 0, sizeof(int) * 4);
	// clear the buffer pointers
	_frameBuffer = 0;
	_vbr = 0;
	_vbrFrameOffTable = 0;
	// read stream-level information from the first frame
	if (SourceData->peek(head, sizeof(head)) != sizeof(head))
		return;
	if (!mp3GetDecodeInfo(head, sizeof(head), & _mpegDI, 1))
		return;
	if (!mp3GetHeaderInfo(head, & _mpegHI))
		return;
	// cache the values we need later
	_channels = _mpegDI.channels;
	_frequency = _mpegDI.frequency;
	_bitrate = _mpegDI.HeadBitRate;
	_vbr = _mpegDI.bitRate ? false : true;	/* bitRate==0 marks VBR */
	_minFrameSize = _mpegDI.minInputSize;
	_maxFrameSize = _mpegDI.maxInputSize;
	_samplesInFrame = _mpegHI.samplesInFrame;
	_curFrameSize = _mpegHI.curFrameSize;
	_version = _mpegDI.header.version;
	_layer = _mpegDI.header.layer;
	_br_index = _mpegDI.header.br_index;
	_fr_index = _mpegDI.header.fr_index;
	_mode = _mpegDI.header.mode;
	_slotSize = (_mpegDI.header.layer == 1) ? 4 : 1;
	_bitPerFrame = (_mpegDI.header.version == 1) ?
				   (double) (144 * 8 * _bitrate) /
				   (double) _frequency :
				   (double) (144 * 8 * _bitrate) /
				   (double) (_frequency * 2);
	_frames = _vbr ?
			  _mpegDI.frames :
			  (DWORD) floor(((double) ((SourceData->size + _slotSize) * 8)) /
							_bitPerFrame);
	_samplesInFile = _frames * _samplesInFrame;
	//*********************************************************************************
	// debug: walk every frame to count them exactly, then force the VBR
	// (offset-table) path unconditionally.
	// NOTE(review): this looks like leftover debug code — _vbr = true makes
	// the CBR estimate above irrelevant; confirm whether it is intentional.
	cur = 0;
	pos = 0;
	while (!SourceData->eof()) {
		SourceData->seek(pos, 0);
		if (SourceData->peek(head, 4) != 4)
			break;
		if (!mp3GetHeaderInfo(head, & info))
			break;
		pos += info.curFrameSize;
		cur++;
	}
	SourceData->seek(0, 0);
	if (cur != _frames)
		_frames = cur;
	_vbr = true;
	//**********************************************************************************
	// variable-bitrate file?
	if (_vbr) {
		// allocate the per-frame byte-offset table
	#if AGSS_USE_MALLOC
		_vbrFrameOffTable = (DWORD *) malloc(_frames * sizeof(DWORD));
	#else
		_vbrFrameOffTable = (DWORD *) GlobalAlloc(GPTR,
												  _frames * sizeof(DWORD));
	#endif
		if (!_vbrFrameOffTable)
			return;
		cur = 0;
		pos = 0;
		// fill the offset table frame by frame
		while (cur != _frames) {
			SourceData->seek(pos, 0);
			SourceData->peek(head, 4);
			if (!mp3GetHeaderInfo(head, & info))
				break;
			_vbrFrameOffTable[cur] = pos;
			pos += info.curFrameSize;
			cur++;
		}
		SourceData->seek(0, 0);
	}
	// allocate the frame buffer (sized for the largest possible frame)
#if AGSS_USE_MALLOC
	_frameBuffer = (BYTE *) malloc(_mpegDI.maxInputSize);
#else
	_frameBuffer = (BYTE *) GlobalAlloc(GPTR, _mpegDI.maxInputSize);
#endif
	if (!_frameBuffer)
		return;
	// read the first frame
	if (SourceData->read(_frameBuffer, _curFrameSize) != _curFrameSize) {
#if AGSS_USE_MALLOC
		free(_frameBuffer);
#else
		GlobalFree(_frameBuffer);
#endif
		_frameBuffer = 0;
		return;
	}
	// start decoding
	if (!mp3DecodeStart(_frameBuffer, _curFrameSize)) {
#if AGSS_USE_MALLOC
		free(_frameBuffer);
#else
		GlobalFree(_frameBuffer);
#endif
		_frameBuffer = 0;
		return;
	}
	// prepare to decode the first frame
	_mpegDP.header = _mpegDI.header;
	_mpegDP.bitRate = _mpegDI.bitRate;
	_mpegDP.inputBuf = _frameBuffer;
	_mpegDP.inputSize = _curFrameSize;
	_mpegDP.outputBuf = _sampleBuffer;
	_mpegDP.outputSize = _mpegDI.outputSize;
	// decode the first frame
	if (!mp3DecodeFrame(&_mpegDP)) {
#if AGSS_USE_MALLOC
		free(_frameBuffer);
#else
		GlobalFree(_frameBuffer);
#endif
		_frameBuffer = 0;
		return;
	}
	// bookkeeping for the read cursor
	_curFrame = 0;
	_curSampleOffset = 0;
	// publish the PCM format (DirectX needs these fields to create a buffer)
	pcm_format->wFormatTag = 1;
	pcm_format->wBitsPerSample = 16;
	pcm_format->nSamplesPerSec = _frequency;
	pcm_format->nChannels = _channels;
	pcm_format->nBlockAlign = (pcm_format->nChannels * pcm_format->wBitsPerSample) >>
							  3;
	pcm_format->nAvgBytesPerSec = pcm_format->nBlockAlign * pcm_format->nSamplesPerSec;
	// file recognised
	flag = true;
}
//-----------------------------------------------------------------------------
// Decoder destructor: release the VBR offset table and the frame buffer,
// matching the allocator selected by AGSS_USE_MALLOC.
//-----------------------------------------------------------------------------
CDecompressMpeg::~CDecompressMpeg()
{
	if (_vbrFrameOffTable) {
#if AGSS_USE_MALLOC
		free(_vbrFrameOffTable);
#else
		GlobalFree(_vbrFrameOffTable);
#endif
		_vbrFrameOffTable = 0;
	}
	if (_frameBuffer) {
#if AGSS_USE_MALLOC
		free(_frameBuffer);
#else
		GlobalFree(_frameBuffer);
#endif
		_frameBuffer = 0;
	}
}
//-----------------------------------------------------------------------------
// Decompress MP3 data into MONO 16-bit samples.
// in  : buffer - destination buffer
//       start  - offset into the sound data, in samples
//       length - number of samples to decode
// out : number of BYTES written to the destination buffer.
// Stereo sources are downmixed by averaging left/right with clamping.
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetMonoSamples(void* buffer, DWORD start, DWORD length,
									  bool loop)
{
	DWORD NeedFrame;
	DWORD NeedOffset;
	DWORD samples;
	DWORD i;
	BYTE head[4];
	short* dst = (short*) buffer;
	// range check
	if (start > _samplesInFile)
		return 0;
	// clip the request to the end of the stream
	if ((start + length) > _samplesInFile)
		length = _samplesInFile - start;
	// translate the sample position into frame + intra-frame offset
	NeedFrame = start / _samplesInFrame;
	NeedOffset = start % _samplesInFrame;
	// position the decoder
	if (!mp3seek(NeedFrame))
		return 0;
	DWORD remaining = length;
	DWORD readsize = 0;
	bool readframe = false;
	while (remaining) {
		// a mono frame consumed whole can be decoded straight into dst
		// NOTE(review): if this frame was already decoded into
		// _sampleBuffer (the _curFrame == NeedFrame case below is
		// skipped), the fast path copies nothing into dst — confirm this
		// combination cannot occur.
		if ((_channels == 1) &&
			(NeedOffset == 0) &&
			(remaining > _samplesInFrame))
			readframe = true;
		else
			readframe = false;
		if (_curFrame != NeedFrame) {
			_curFrame = NeedFrame;
			if (SourceData->peek(&head, 4) != 4)
				break;
			if (!mp3GetHeaderInfo(head, & _mpegHI))
				return 0;
			_curFrameSize = _mpegHI.curFrameSize;
			if (SourceData->read(_frameBuffer, _curFrameSize) != _curFrameSize)
				return 0;
			_mpegDP.header = _mpegHI.header;
			_mpegDP.bitRate = _mpegHI.curBitRate;
			_mpegDP.inputBuf = _frameBuffer;
			_mpegDP.inputSize = _mpegHI.curFrameSize;
			_mpegDP.outputBuf = (readframe) ? dst : _sampleBuffer;
			_mpegDP.outputSize = _mpegHI.outputSize;
			// decode one frame
			if (!mp3DecodeFrame(&_mpegDP))
				return 0;
		}
		samples = _samplesInFrame - NeedOffset;
		readsize = (remaining > samples) ? samples : remaining;
		short* src = _sampleBuffer + (NeedOffset* _channels);
		if (_channels == 1) {
			if (!readframe)
				memcpy(dst, src, readsize * 2);
			dst += readsize;
		} else {
			// stereo source: average the channels with clamping
			for (i = 0; i < readsize; i++) {
				int s = ((int) src[0] + (int) src[1]) >> 1;
				s = (s < -32768) ? -32768 : (s > 32767) ? 32767 : s;
				*dst++ = (short) s;
				src += 2;
			}
		}
		NeedOffset = 0;
		remaining -= readsize;
		if (remaining)
			NeedFrame++;
	}
	// NOTE(review): casting pointers to DWORD truncates on 64-bit builds;
	// (DWORD)((char*)dst - (char*)buffer) would be safe — confirm targets.
	return ((DWORD) dst - (DWORD) buffer);
}
//-----------------------------------------------------------------------------
// Decompress MP3 data into STEREO 16-bit samples.
// in  : buffer - destination buffer
//       start  - offset into the sound data, in samples
//       length - number of samples to decode
// out : number of BYTES written to the destination buffer.
// Mono sources are duplicated into both output channels.
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetStereoSamples(void* buffer, DWORD start,
										DWORD length, bool loop)
{
	DWORD NeedFrame;
	DWORD NeedOffset;
	//	DWORD NeedFrameOffset;
	DWORD samples;
	DWORD i;
	BYTE head[4];
	//	int off;
	short* dst = (short*) buffer;
	// range check
	if (start > _samplesInFile)
		return 0;
	// clip the request to the end of the stream
	if ((start + length) > _samplesInFile)
		length = _samplesInFile - start;
	// translate the sample position into frame + intra-frame offset
	NeedFrame = start / _samplesInFrame;
	NeedOffset = start % _samplesInFrame;
	// position the decoder
	if (!mp3seek(NeedFrame))
		return 0;
	DWORD remaining = length;
	DWORD readsize = 0;
	bool readframe = false;
	while (remaining) {
		// a stereo frame consumed whole can be decoded straight into dst
		// (see the review note in GetMonoSamples about the skipped-decode
		// combination; the same applies here)
		if ((_channels == 2) &&
			(NeedOffset == 0) &&
			(remaining > _samplesInFrame))
			readframe = true;
		else
			readframe = false;
		if (_curFrame != NeedFrame) {
			_curFrame = NeedFrame;
			SourceData->peek(&head, 4);
			if (!mp3GetHeaderInfo(head, & _mpegHI))
				return 0;
			_curFrameSize = _mpegHI.curFrameSize;
			if (SourceData->read(_frameBuffer, _curFrameSize) != _curFrameSize)
				return 0;
			_mpegDP.header = _mpegHI.header;
			_mpegDP.bitRate = _mpegHI.curBitRate;
			_mpegDP.inputBuf = _frameBuffer;
			_mpegDP.inputSize = _mpegHI.curFrameSize;
			_mpegDP.outputBuf = (readframe) ? dst : _sampleBuffer;
			_mpegDP.outputSize = _mpegHI.outputSize;
			// decode one frame
			if (!mp3DecodeFrame(&_mpegDP))
				return 0;
		}
		samples = _samplesInFrame - NeedOffset;
		readsize = (remaining > samples) ? samples : remaining;
		short* src = _sampleBuffer + (NeedOffset* _channels);
		if (_channels == 1) {
			// mono source: duplicate each sample into L and R
			for (i = 0; i < readsize; i++) {
				*dst++ = *src;
				*dst++ = *src;
				src++;
			}
		} else {
			if (!readframe)
				memcpy(dst, src, readsize * 4);
			dst += readsize * 2;
		}
		NeedOffset = 0;
		remaining -= readsize;
		if (remaining)
			NeedFrame++;
	}
	// NOTE(review): pointer-to-DWORD cast truncates on 64-bit builds.
	return ((DWORD) dst - (DWORD) buffer);
}
//-----------------------------------------------------------------------------
// Fill a stretch of the buffer with silence, mono mode.
// in  : buffer - destination buffer
//       length - number of samples
// out : number of bytes written (2 bytes per mono 16-bit sample)
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetMonoMute(void* buffer, DWORD length)
{
	const DWORD bytes = length * 2;
	memset(buffer, 0, bytes);
	return bytes;
}
//-----------------------------------------------------------------------------
// Fill a stretch of the buffer with silence, stereo mode.
// in  : buffer - destination buffer
//       length - number of samples
// out : number of bytes written (4 bytes per stereo 16-bit sample pair)
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetStereoMute(void* buffer, DWORD length)
{
	const DWORD bytes = length * 4;
	memset(buffer, 0, bytes);
	return bytes;
}
//-----------------------------------------------------------------------------
// Total number of samples in the file (frames * samples per frame,
// computed in the constructor).
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetSamplesInFile(void)
{
	return _samplesInFile;
}
//-----------------------------------------------------------------------------
// Decoded track size in bytes for mono output (2 bytes per sample).
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetRealMonoDataSize(void)
{
	return _samplesInFile * 2;
}
//-----------------------------------------------------------------------------
// Decoded track size in bytes for stereo output (4 bytes per sample).
//-----------------------------------------------------------------------------
DWORD CDecompressMpeg::GetRealStereoDataSize(void)
{
	return _samplesInFile * 4;
}
| gecko0307/squall | source/SoundFile/MP3/MpegDecoder.cpp | C++ | mit | 77,394 |
import { moduleForModel, test } from 'ember-qunit';
import Pretender from 'pretender';
// ToDo: Install ember-cli-faker
import mocks from './mocks';
const {
inventoryMock,
productMock,
componentsMock
} = mocks;
let mockServer;
// Unit-test module for the `inventory` serializer. `needs` pulls in the
// application serializer plus every model touched by the relationships
// exercised below (product belongsTo, components hasMany).
moduleForModel('inventory', 'Unit | Serializer | inventory', {
  needs: ['serializer:application',
    'model:product',
    'model:inventory',
    'model:component'],
  beforeEach() {
    // Stub the HTTP layer so adapter requests are answered from ./mocks
    // instead of hitting a live backend.
    mockServer = new Pretender(function() {
      this.get('/products', function() {
        const response = {
          records: [productMock]
        };
        return [200, { "Content-Type": "application/json" }, JSON.stringify(response)];
      });
      this.get(`/products/${productMock.id}`, function() {
        return [200, { "Content-Type": "application/json" }, JSON.stringify(productMock)];
      });
      this.get('/inventories', function() {
        const response = {
          records: [inventoryMock]
        };
        return [200, { "Content-Type": "application/json" }, JSON.stringify(response)];
      });
      this.get(`/components/${componentsMock[0].id}`, function() {
        return [200, { "Content-Type": "application/json" }, JSON.stringify(componentsMock[0])];
      });
      this.get(`/components/${componentsMock[1].id}`, function() {
        return [200, { "Content-Type": "application/json" }, JSON.stringify(componentsMock[1])];
      });
    });
  },
  afterEach() {
    // Shut Pretender down so stubbed routes do not leak into other modules.
    mockServer.shutdown();
  }
});
test('it serializes records', function(assert) {
  // Load all inventories through the stubbed backend and check that the
  // serializer maps the mock payload's fields onto the model.
  return this.store().findAll('inventory').then((records) => {
    assert.equal(records.get('length'), 1);
    const record = records.objectAt(0);
    assert.ok(record.get('created'));
    assert.equal(record.get('qty'), inventoryMock.fields['qty']);
    assert.equal(record.get('restock-at'), inventoryMock.fields['restock-at']);
  });
});
test('it serializes belongsTo relationship', function(assert) {
  return this.store().findAll('inventory').then((inventories) => {
    const inventory = inventories.objectAt(0);
    // Return the nested promise so QUnit waits for these assertions.
    // Previously the inner promise was dropped, so the test could end
    // before the assertions ran (a silently-green test).
    return inventory.get('product').then((product) => {
      assert.equal(product.get('name'), productMock.fields.name);
      assert.equal(product.get('description'), productMock.fields.description);
    });
  });
});
test('it serializes hasMany relationship', function(assert) {
  return this.store().findAll('product').then((products) => {
    const product = products.objectAt(0);
    // Return the nested promise so QUnit waits for the relationship to
    // resolve; previously it was dropped and the assertions could run
    // after the test had already completed.
    return product.get('components').then((components) => {
      components.forEach((component, index) => {
        assert.equal(component.get('name'), componentsMock[index].fields.name);
      });
    });
  });
});
| benoror/ember-airtable | tests/unit/serializers/inventory-test.js | JavaScript | mit | 2,679 |
package com.asksunny.batch.tasklets;
/**
 * Simple mutable bean holding an identifier and a display name,
 * used by the batch tasklet demos.
 */
public class Demo1 {

	private long id;
	private String name;

	/** No-arg constructor required for bean-style instantiation. */
	public Demo1() {
	}

	/** @return the numeric identifier */
	public long getId() {
		return id;
	}

	/** @param value the numeric identifier to store */
	public void setId(long value) {
		this.id = value;
	}

	/** @return the display name */
	public String getName() {
		return name;
	}

	/** @param value the display name to store */
	public void setName(String value) {
		this.name = value;
	}
}
| devsunny/app-galleries | batch-flow-process/src/main/java/com/asksunny/batch/tasklets/Demo1.java | Java | mit | 341 |
package net.glowstone.net.codec.play.game;
import com.flowpowered.network.Codec;
import io.netty.buffer.ByteBuf;
import java.io.IOException;
import net.glowstone.net.GlowBufUtils;
import net.glowstone.net.message.play.game.UpdateBlockEntityMessage;
import net.glowstone.util.nbt.CompoundTag;
import org.bukkit.util.BlockVector;
/**
 * Wire codec for the play-state "update block entity" packet: a block
 * position, a one-byte action id, and an NBT compound payload, in that
 * order on the wire.
 */
public final class UpdateBlockEntityCodec implements Codec<UpdateBlockEntityMessage> {
    @Override
    public UpdateBlockEntityMessage decode(ByteBuf buffer) throws IOException {
        // Field order: position, action byte, NBT compound.
        BlockVector pos = GlowBufUtils.readBlockPosition(buffer);
        int action = buffer.readByte();
        CompoundTag nbt = GlowBufUtils.readCompound(buffer);
        return new UpdateBlockEntityMessage(pos.getBlockX(), pos.getBlockY(), pos.getBlockZ(),
            action, nbt);
    }

    @Override
    public ByteBuf encode(ByteBuf buf, UpdateBlockEntityMessage message) throws IOException {
        // Mirror of decode(): position, action byte, NBT compound.
        GlowBufUtils.writeBlockPosition(buf, message.getX(), message.getY(), message.getZ());
        buf.writeByte(message.getAction());
        GlowBufUtils.writeCompound(buf, message.getNbt());
        return buf;
    }
}
| GlowstoneMC/GlowstonePlusPlus | src/main/java/net/glowstone/net/codec/play/game/UpdateBlockEntityCodec.java | Java | mit | 1,140 |
//---------------------------------------------------------------------
// <copyright file="DuplicateStream.cs" company="Microsoft Corporation">
// Copyright (c) 1999, Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Part of the Deployment Tools Foundation project.
// </summary>
//---------------------------------------------------------------------
namespace Microsoft.PackageManagement.Archivers.Internal.Compression
{
using System;
using System.IO;
/// <summary>
/// Duplicates a source stream by maintaining a separate position.
/// </summary>
/// <remarks>
/// WARNING: duplicate streams are not thread-safe with respect to each other or the original stream.
/// If multiple threads use duplicate copies of the same stream, they must synchronize for any operations.
/// </remarks>
    public class DuplicateStream : Stream
    {
        // Underlying stream shared by the original and all duplicates.
        private Stream source;

        // Position tracked independently of the source stream's position.
        private long position;

        /// <summary>
        /// Creates a new duplicate of a stream.
        /// </summary>
        /// <param name="source">source of the duplicate</param>
        public DuplicateStream(Stream source)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }

            // Unwrap nested duplicates so every copy points at the one real stream.
            this.source = DuplicateStream.OriginalStream(source);
        }

        /// <summary>
        /// Gets the original stream that was used to create the duplicate.
        /// </summary>
        public Stream Source
        {
            get
            {
                return this.source;
            }
        }

        /// <summary>
        /// Gets a value indicating whether the source stream supports reading.
        /// </summary>
        /// <value>true if the stream supports reading; otherwise, false.</value>
        public override bool CanRead
        {
            get
            {
                return this.source.CanRead;
            }
        }

        /// <summary>
        /// Gets a value indicating whether the source stream supports writing.
        /// </summary>
        /// <value>true if the stream supports writing; otherwise, false.</value>
        public override bool CanWrite
        {
            get
            {
                return this.source.CanWrite;
            }
        }

        /// <summary>
        /// Gets a value indicating whether the source stream supports seeking.
        /// </summary>
        /// <value>true if the stream supports seeking; otherwise, false.</value>
        public override bool CanSeek
        {
            get
            {
                return this.source.CanSeek;
            }
        }

        /// <summary>
        /// Gets the length of the source stream.
        /// </summary>
        public override long Length
        {
            get
            {
                return this.source.Length;
            }
        }

        /// <summary>
        /// Gets or sets the position of the current stream,
        /// ignoring the position of the source stream.
        /// </summary>
        public override long Position
        {
            get
            {
                return this.position;
            }

            set
            {
                this.position = value;
            }
        }

        /// <summary>
        /// Retrieves the original stream from a possible duplicate stream.
        /// </summary>
        /// <param name="stream">Possible duplicate stream.</param>
        /// <returns>If the stream is a DuplicateStream, returns
        /// the duplicate's source; otherwise returns the same stream.</returns>
        public static Stream OriginalStream(Stream stream)
        {
            DuplicateStream dupStream = stream as DuplicateStream;
            return dupStream != null ? dupStream.Source : stream;
        }

        /// <summary>
        /// Flushes the source stream.
        /// </summary>
        public override void Flush()
        {
            this.source.Flush();
        }

        /// <summary>
        /// Sets the length of the source stream.
        /// </summary>
        /// <param name="value">The desired length of the stream in bytes.</param>
        public override void SetLength(long value)
        {
            this.source.SetLength(value);
        }

#if !CORECLR
        /// <summary>
        /// Closes the underlying stream, effectively closing ALL duplicates.
        /// </summary>
        public override void Close()
        {
            this.source.Close();
        }
#endif

        /// <summary>
        /// Disposes the stream
        /// </summary>
        /// <param name="disposing"></param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Note: like Close(), this disposes the SHARED source stream,
                // invalidating every other duplicate of it.
                this.source.Dispose();
            }
        }

        /// <summary>
        /// Reads from the source stream while maintaining a separate position
        /// and not impacting the source stream's position.
        /// </summary>
        /// <param name="buffer">An array of bytes. When this method returns, the buffer
        /// contains the specified byte array with the values between offset and
        /// (offset + count - 1) replaced by the bytes read from the current source.</param>
        /// <param name="offset">The zero-based byte offset in buffer at which to begin
        /// storing the data read from the current stream.</param>
        /// <param name="count">The maximum number of bytes to be read from the current stream.</param>
        /// <returns>The total number of bytes read into the buffer. This can be less
        /// than the number of bytes requested if that many bytes are not currently available,
        /// or zero (0) if the end of the stream has been reached.</returns>
        public override int Read(byte[] buffer, int offset, int count)
        {
            // Swap in this duplicate's position, read, capture the advanced
            // position, then restore the source's original position.
            // (Not thread-safe with respect to other users of the source.)
            long saveSourcePosition = this.source.Position;
            this.source.Position = this.position;
            int read = this.source.Read(buffer, offset, count);
            this.position = this.source.Position;
            this.source.Position = saveSourcePosition;
            return read;
        }

        /// <summary>
        /// Writes to the source stream while maintaining a separate position
        /// and not impacting the source stream's position.
        /// </summary>
        /// <param name="buffer">An array of bytes. This method copies count
        /// bytes from buffer to the current stream.</param>
        /// <param name="offset">The zero-based byte offset in buffer at which
        /// to begin copying bytes to the current stream.</param>
        /// <param name="count">The number of bytes to be written to the
        /// current stream.</param>
        public override void Write(byte[] buffer, int offset, int count)
        {
            // Same save/seek/restore dance as Read().
            long saveSourcePosition = this.source.Position;
            this.source.Position = this.position;
            this.source.Write(buffer, offset, count);
            this.position = this.source.Position;
            this.source.Position = saveSourcePosition;
        }

        /// <summary>
        /// Changes the position of this stream without impacting the
        /// source stream's position.
        /// </summary>
        /// <param name="offset">A byte offset relative to the origin parameter.</param>
        /// <param name="origin">A value of type SeekOrigin indicating the reference
        /// point used to obtain the new position.</param>
        /// <returns>The new position within the current stream.</returns>
        public override long Seek(long offset, SeekOrigin origin)
        {
            // SeekOrigin.Begin uses the default originPosition of 0.
            long originPosition = 0;
            if (origin == SeekOrigin.Current)
            {
                originPosition = this.position;
            }
            else if (origin == SeekOrigin.End)
            {
                originPosition = this.Length;
            }

            this.position = originPosition + offset;
            return this.position;
        }
    }
}
| OneGet/oneget | src/Microsoft.PackageManagement.ArchiverProviders/Compression/DuplicateStream.cs | C# | mit | 8,313 |
+(function () {
'use strict';
angular
.module('DashboardApplication')
.controller('FileManagerRemoveFolderController', ['$scope', '$q', 'Event', 'FoldersRest', FileManagerRemoveFolderController]);
function FileManagerRemoveFolderController($scope, $q, Event, FoldersRest) {
var vm = this;
var folderId = $scope.ngDialogData.folderId;
vm.removeFolder = removeFolder;
function removeFolder() {
var id = folderId;
var $defer = $q.defer();
FoldersRest.one(id).remove().then(function () {
console.log("FoldersRest");
debugger;
Event.publish('FOLDERS_TREEVIEW_UPDATED');
alert('فولدر با موفقیت حذف شد', 'انجام شد!');
$defer.resolve();
}, function (error) {
$defer.reject(error);
});
return $defer.promise;
}
}
})(); | MozhganNajafi/andisheh-bartar | source/dashboard/components/filemanager/controllers/removefolder.controller.js | JavaScript | mit | 980 |
package co.colector.model.request;
import java.util.ArrayList;
import java.util.List;
import co.colector.ColectorApplication;
import co.colector.R;
import co.colector.model.IdInputValue;
import co.colector.model.IdValue;
import co.colector.model.Survey;
import co.colector.model.AnswerValue;
import co.colector.session.AppSession;
import co.colector.utils.NetworkUtils;
/**
 * Request payload for uploading a completed survey: flattens a
 * {@link Survey} instance (collector/form metadata, location, start/end
 * times, and every answer) into the field names the backend expects.
 *
 * Created by dherrera on 11/10/15.
 */
public class SendSurveyRequest {
    private String colector_id;
    private String form_id;
    private String longitud;
    private String latitud;
    private String horaini;
    private String horafin;
    private List<IdInputValue> responses;

    public SendSurveyRequest(Survey survey) {
        this.colector_id = String.valueOf(AppSession.getInstance().getUser().getColector_id());
        this.form_id = String.valueOf(survey.getForm_id());
        this.longitud = survey.getInstanceLongitude();
        this.latitud = survey.getInstanceLatitude();
        this.horaini = survey.getInstanceHoraIni();
        this.horafin = survey.getInstanceHoraFin();
        this.setResponsesData(survey.getInstanceAnswers());
    }

    public List<IdInputValue> getResponses() {
        return responses;
    }

    public void setResponses(List<IdInputValue> responses) {
        this.responses = responses;
    }

    // Flattens each answer into (question id, raw value) pairs.
    // NOTE(review): cases 6/14/16 appear to be attachment/image question
    // types — non-empty values are rewritten into a device-scoped file
    // name via R.string.image_name_format; confirm against the question
    // type catalog.
    private void setResponsesData(List<IdValue> responsesData) {
        responses = new ArrayList<>();
        for (IdValue item : responsesData) {
            switch (item.getmType()) {
                case 6:
                case 14:
                case 16:
                    for (AnswerValue answerValue : item.getValue())
                        if (!answerValue.getValue().equals("")) {
                            // Keep only the file name: the text after the last '/'.
                            int lastIndex = answerValue.getValue().length();
                            int slashIndex = answerValue.getValue().lastIndexOf("/");
                            responses.add(new IdInputValue(String.valueOf(item.getId()), ColectorApplication.getInstance().getString(R.string.image_name_format,
                                    NetworkUtils.getAndroidID(ColectorApplication.getInstance()),
                                    answerValue.getValue().substring((slashIndex + 1), lastIndex))));
                        }
                    break;
                default:
                    // All other question types are sent verbatim.
                    for (AnswerValue answerValue : item.getValue())
                        responses.add(new IdInputValue(String.valueOf(item.getId()), answerValue.getValue()));
            }
        }
    }
}
| jlpuma24/colector-android-telecomsoftsrs | app/src/main/java/co/colector/model/request/SendSurveyRequest.java | Java | mit | 2,516 |
package controllers
import (
"github.com/revel/revel"
"AuthKeyPush/app/models"
)
// Auth is the revel controller handling third-party OAuth login callbacks.
type Auth struct {
	*revel.Controller
}
// Github completes the GitHub OAuth flow: it exchanges the temporary
// authorization code via models.GitHub, records the outcome in the
// session ("login", plus a hint message on failure), and redirects
// back to the index page.
func (c Auth) Github(code string) revel.Result {
	// `if login` instead of `login == true` (go vet/golint idiom).
	if models.GitHub(code) {
		c.Session["login"] = "true"
	} else {
		c.Session["login"] = "false"
		// Fixed typo in the user-facing message ("faied" -> "failed").
		c.Session["msg"] = "Login failed. Check conf/site.json or README.md"
	}
	return c.Redirect("/")
}
| rluisr/AuthKeyPush | app/controllers/auth.go | GO | mit | 397 |
// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "init.h"
#include "util.h"
#include "sync.h"
#include "ui_interface.h"
#include "base58.h"
#include "bitcoinrpc.h"
#include "db.h"
#include <boost/asio.hpp>
#include <boost/asio/ip/v6_only.hpp>
#include <boost/bind.hpp>
#include <boost/filesystem.hpp>
#include <boost/foreach.hpp>
#include <boost/iostreams/concepts.hpp>
#include <boost/iostreams/stream.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/asio/ssl.hpp>
#include <boost/filesystem/fstream.hpp>
#include <boost/shared_ptr.hpp>
#include <list>
using namespace std;
using namespace boost;
using namespace boost::asio;
using namespace json_spirit;
// Cached "user:password" credentials (from -rpcuser/-rpcpassword),
// compared against the Authorization header of incoming requests.
static std::string strRPCUserColonPass;

// These are created by StartRPCThreads, destroyed in StopRPCThreads
static asio::io_service* rpc_io_service = NULL;
static ssl::context* rpc_ssl_context = NULL;
static boost::thread_group* rpc_worker_group = NULL;
// Default JSON-RPC listen port: 16541 on mainnet, 16540 on testnet.
static inline unsigned short GetDefaultRPCPort()
{
    if (GetBoolArg("-testnet", false))
        return 16540;
    return 16541;
}
// Build a JSON-RPC error object: {"code": <code>, "message": <message>}.
Object JSONRPCError(int code, const string& message)
{
    Object err;
    err.push_back(Pair("code", code));
    err.push_back(Pair("message", message));
    return err;
}
// Validate the positional parameters of an RPC call against the expected
// JSON types. Expected types beyond params.size() are ignored (optional
// trailing parameters); a null value is accepted for any position when
// fAllowNull is set. Throws an RPC_TYPE_ERROR describing the mismatch.
void RPCTypeCheck(const Array& params,
                  const list<Value_type>& typesExpected,
                  bool fAllowNull)
{
    unsigned int i = 0;
    BOOST_FOREACH(Value_type t, typesExpected)
    {
        if (params.size() <= i)
            break;
        const Value& v = params[i];
        if (!((v.type() == t) || (fAllowNull && (v.type() == null_type))))
        {
            string err = strprintf("Expected type %s, got %s",
                                   Value_type_name[t], Value_type_name[v.type()]);
            throw JSONRPCError(RPC_TYPE_ERROR, err);
        }
        i++;
    }
}
// Validate named members of JSON object `o` against expected types.
// A missing member is an error unless fAllowNull, which also permits an
// explicit null value. Throws an RPC_TYPE_ERROR describing the mismatch.
void RPCTypeCheck(const Object& o,
                  const map<string, Value_type>& typesExpected,
                  bool fAllowNull)
{
    BOOST_FOREACH(const PAIRTYPE(string, Value_type)& t, typesExpected)
    {
        const Value& v = find_value(o, t.first);
        if (!fAllowNull && v.type() == null_type)
            throw JSONRPCError(RPC_TYPE_ERROR, strprintf("Missing %s", t.first.c_str()));
        if (!((v.type() == t.second) || (fAllowNull && (v.type() == null_type))))
        {
            string err = strprintf("Expected type %s for %s, got %s",
                                   Value_type_name[t.second], t.first.c_str(), Value_type_name[v.type()]);
            throw JSONRPCError(RPC_TYPE_ERROR, err);
        }
    }
}
// Parse a JSON number denominated in coins into an int64 amount of base
// units. Rejects amounts <= 0 or above the 84,000,000-coin cap, then
// re-checks MoneyRange after scaling/rounding by COIN.
int64 AmountFromValue(const Value& value)
{
    double dAmount = value.get_real();
    if (dAmount <= 0.0 || dAmount > 84000000.0)
        throw JSONRPCError(RPC_TYPE_ERROR, "Invalid amount");
    int64 nAmount = roundint64(dAmount * COIN);
    if (!MoneyRange(nAmount))
        throw JSONRPCError(RPC_TYPE_ERROR, "Invalid amount");
    return nAmount;
}
// Convert an amount in base units into a JSON double denominated in coins.
Value ValueFromAmount(int64 amount)
{
    return static_cast<double>(amount) / static_cast<double>(COIN);
}
// Encode the 32-bit "nBits" compact difficulty field as an 8-character
// hex string. htonl fixes the byte order so the output is big-endian
// regardless of host endianness; the union reinterprets the bytes.
std::string HexBits(unsigned int nBits)
{
    union {
        int32_t nBits;
        char cBits[4];
    } uBits;
    uBits.nBits = htonl((int32_t)nBits);
    return HexStr(BEGIN(uBits.cBits), END(uBits.cBits));
}
///
/// Note: This interface may still be subject to change.
///
// Build the multi-line help text: for every registered command (or just
// `strCommand` when given), invoke its handler with fHelp=true, which by
// convention throws its one-paragraph usage string; collect those strings.
string CRPCTable::help(string strCommand) const
{
    string strRet;
    set<rpcfn_type> setDone;
    for (map<string, const CRPCCommand*>::const_iterator mi = mapCommands.begin(); mi != mapCommands.end(); ++mi)
    {
        const CRPCCommand *pcmd = mi->second;
        string strMethod = mi->first;
        // We already filter duplicates, but these deprecated screw up the sort order
        if (strMethod.find("label") != string::npos)
            continue;
        if (strCommand != "" && strMethod != strCommand)
            continue;
        if (pcmd->reqWallet && !pwalletMain)
            continue;
        try
        {
            Array params;
            rpcfn_type pfn = pcmd->actor;
            // setDone de-dupes handlers shared by multiple command names.
            if (setDone.insert(pfn).second)
                (*pfn)(params, true);
        }
        catch (std::exception& e)
        {
            // Help text is returned in an exception
            string strHelp = string(e.what());
            // In the full listing, keep only the first line of each usage text.
            if (strCommand == "")
                if (strHelp.find('\n') != string::npos)
                    strHelp = strHelp.substr(0, strHelp.find('\n'));
            strRet += strHelp + "\n";
        }
    }
    if (strRet == "")
        strRet = strprintf("help: unknown command: %s\n", strCommand.c_str());
    // Drop the trailing newline.
    strRet = strRet.substr(0,strRet.size()-1);
    return strRet;
}
// RPC "help": list all commands, or show the usage text for one command.
Value help(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 1)
        throw runtime_error(
            "help [command]\n"
            "List commands, or get help for a command.");
    string strCommand = (params.size() > 0) ? params[0].get_str() : string();
    return tableRPC.help(strCommand);
}
// RPC "stop": trigger an orderly shutdown. The reply is returned before
// the shutdown completes, so the caller still receives it.
Value stop(const Array& params, bool fHelp)
{
    // Accept the deprecated and ignored 'detach' boolean argument
    if (fHelp || params.size() > 1)
        throw runtime_error(
            "stop\n"
            "Stop TMCoin server.");
    // Shutdown will take long enough that the response should get back
    StartShutdown();
    return "TMCoin server stopping";
}
//
// Call Table
//
// Registry of every RPC method; scanned once by the CRPCTable constructor
// to build the name -> command map used for dispatch and help().
static const CRPCCommand vRPCCommands[] =
{ //  name                      actor (function)         okSafeMode threadSafe reqWallet
  //  ------------------------  -----------------------  ---------- ---------- ---------
    { "help",                   &help,                   true,      true,      false },
    { "stop",                   &stop,                   true,      true,      false },
    { "getblockcount",          &getblockcount,          true,      false,     false },
    { "getbestblockhash",       &getbestblockhash,       true,      false,     false },
    { "getconnectioncount",     &getconnectioncount,     true,      false,     false },
    { "getpeerinfo",            &getpeerinfo,            true,      false,     false },
    { "addnode",                &addnode,                true,      true,      false },
    { "getaddednodeinfo",       &getaddednodeinfo,       true,      true,      false },
    { "getdifficulty",          &getdifficulty,          true,      false,     false },
    { "getnetworkhashps",       &getnetworkhashps,       true,      false,     false },
    { "getgenerate",            &getgenerate,            true,      false,     false },
    { "setgenerate",            &setgenerate,            true,      false,     true },
    { "gethashespersec",        &gethashespersec,        true,      false,     false },
    { "getinfo",                &getinfo,                true,      false,     false },
    { "getmininginfo",          &getmininginfo,          true,      false,     false },
    { "getnewaddress",          &getnewaddress,          true,      false,     true },
    { "getaccountaddress",      &getaccountaddress,      true,      false,     true },
    { "setaccount",             &setaccount,             true,      false,     true },
    { "getaccount",             &getaccount,             false,     false,     true },
    { "getaddressesbyaccount",  &getaddressesbyaccount,  true,      false,     true },
    { "sendtoaddress",          &sendtoaddress,          false,     false,     true },
    { "getreceivedbyaddress",   &getreceivedbyaddress,   false,     false,     true },
    { "getreceivedbyaccount",   &getreceivedbyaccount,   false,     false,     true },
    { "listreceivedbyaddress",  &listreceivedbyaddress,  false,     false,     true },
    { "listreceivedbyaccount",  &listreceivedbyaccount,  false,     false,     true },
    { "backupwallet",           &backupwallet,           true,      false,     true },
    { "keypoolrefill",          &keypoolrefill,          true,      false,     true },
    { "walletpassphrase",       &walletpassphrase,       true,      false,     true },
    { "walletpassphrasechange", &walletpassphrasechange, false,     false,     true },
    { "walletlock",             &walletlock,             true,      false,     true },
    { "encryptwallet",          &encryptwallet,          false,     false,     true },
    { "validateaddress",        &validateaddress,        true,      false,     false },
    { "getbalance",             &getbalance,             false,     false,     true },
    { "move",                   &movecmd,                false,     false,     true },
    { "sendfrom",               &sendfrom,               false,     false,     true },
    { "sendmany",               &sendmany,               false,     false,     true },
    { "addmultisigaddress",     &addmultisigaddress,     false,     false,     true },
    { "createmultisig",         &createmultisig,         true,      true ,     false },
    { "getrawmempool",          &getrawmempool,          true,      false,     false },
    { "getblock",               &getblock,               false,     false,     false },
    { "getblockhash",           &getblockhash,           false,     false,     false },
    { "gettransaction",         &gettransaction,         false,     false,     true },
    { "listtransactions",       &listtransactions,       false,     false,     true },
    { "listaddressgroupings",   &listaddressgroupings,   false,     false,     true },
    { "signmessage",            &signmessage,            false,     false,     true },
    { "verifymessage",          &verifymessage,          false,     false,     false },
    { "getwork",                &getwork,                true,      false,     true },
    { "getworkex",              &getworkex,              true,      false,     true },
    { "listaccounts",           &listaccounts,           false,     false,     true },
    { "settxfee",               &settxfee,               false,     false,     true },
    { "getblocktemplate",       &getblocktemplate,       true,      false,     false },
    { "submitblock",            &submitblock,            false,     false,     false },
    { "setmininput",            &setmininput,            false,     false,     false },
    { "listsinceblock",         &listsinceblock,         false,     false,     true },
    { "makekeypair",            &makekeypair,            true,      false,     true },
    { "dumpprivkey",            &dumpprivkey,            true,      false,     true },
    { "importprivkey",          &importprivkey,          false,     false,     true },
    { "listunspent",            &listunspent,            false,     false,     true },
    { "getrawtransaction",      &getrawtransaction,      false,     false,     false },
    { "createrawtransaction",   &createrawtransaction,   false,     false,     false },
    { "decoderawtransaction",   &decoderawtransaction,   false,     false,     false },
    { "signrawtransaction",     &signrawtransaction,     false,     false,     false },
    { "sendrawtransaction",     &sendrawtransaction,     false,     false,     false },
    { "gettxoutsetinfo",        &gettxoutsetinfo,        true,      false,     false },
    { "gettxout",               &gettxout,               true,      false,     false },
    { "lockunspent",            &lockunspent,            false,     false,     true },
    { "listlockunspent",        &listlockunspent,        false,     false,     true },
    { "verifychain",            &verifychain,            true,      false,     false },
};
// Index every entry of the static command table by name.
CRPCTable::CRPCTable()
{
    for (unsigned int idx = 0; idx < (sizeof(vRPCCommands) / sizeof(vRPCCommands[0])); idx++)
    {
        const CRPCCommand* pcmd = &vRPCCommands[idx];
        mapCommands[pcmd->name] = pcmd;
    }
}
// Look up a command by name; returns NULL when the name is unknown.
const CRPCCommand *CRPCTable::operator[](string name) const
{
    map<string, const CRPCCommand*>::const_iterator it = mapCommands.find(name);
    return (it == mapCommands.end()) ? NULL : it->second;
}
//
// HTTP protocol
//
// This ain't Apache. We're just using HTTP header for the length field
// and to be compatible with other JSON-RPC implementations.
//
// Build a raw HTTP/1.1 POST request carrying strMsg plus any extra headers.
string HTTPPost(const string& strMsg, const map<string,string>& mapRequestHeaders)
{
    ostringstream s;
    s << "POST / HTTP/1.1\r\n"
      << "User-Agent: tmcoin-json-rpc/" << FormatFullVersion() << "\r\n"
      << "Host: 127.0.0.1\r\n"
      << "Content-Type: application/json\r\n"
      << "Content-Length: " << strMsg.size() << "\r\n"
      << "Connection: close\r\n"
      << "Accept: application/json\r\n";
    BOOST_FOREACH(const PAIRTYPE(string, string)& item, mapRequestHeaders)
        s << item.first << ": " << item.second << "\r\n";
    s << "\r\n" << strMsg;
    return s.str();
}
// Format the current time as an RFC 1123 date string for HTTP headers,
// forcing the "C" locale so weekday/month names are POSIX English.
// NOTE(review): gmtime() and the global setlocale() save/restore are not
// thread-safe — confirm callers are serialized, or switch to gmtime_r
// where available.
string rfc1123Time()
{
    char buffer[64];
    time_t now;
    time(&now);
    struct tm* now_gmt = gmtime(&now);
    string locale(setlocale(LC_TIME, NULL));
    setlocale(LC_TIME, "C"); // we want POSIX (aka "C") weekday/month strings
    strftime(buffer, sizeof(buffer), "%a, %d %b %Y %H:%M:%S +0000", now_gmt);
    setlocale(LC_TIME, locale.c_str());
    return string(buffer);
}
// Render a complete HTTP response for the JSON-RPC server. 401 returns a
// fixed HTML body with a WWW-Authenticate challenge; every other status
// is sent as application/json with strMsg as the body.
static string HTTPReply(int nStatus, const string& strMsg, bool keepalive)
{
    if (nStatus == HTTP_UNAUTHORIZED)
        return strprintf("HTTP/1.0 401 Authorization Required\r\n"
            "Date: %s\r\n"
            "Server: tmcoin-json-rpc/%s\r\n"
            "WWW-Authenticate: Basic realm=\"jsonrpc\"\r\n"
            "Content-Type: text/html\r\n"
            "Content-Length: 296\r\n"
            "\r\n"
            "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"\r\n"
            "\"http://www.w3.org/TR/1999/REC-html401-19991224/loose.dtd\">\r\n"
            "<HTML>\r\n"
            "<HEAD>\r\n"
            "<TITLE>Error</TITLE>\r\n"
            "<META HTTP-EQUIV='Content-Type' CONTENT='text/html; charset=ISO-8859-1'>\r\n"
            "</HEAD>\r\n"
            "<BODY><H1>401 Unauthorized.</H1></BODY>\r\n"
            "</HTML>\r\n", rfc1123Time().c_str(), FormatFullVersion().c_str());
    const char *cStatus;
         if (nStatus == HTTP_OK) cStatus = "OK";
    else if (nStatus == HTTP_BAD_REQUEST) cStatus = "Bad Request";
    else if (nStatus == HTTP_FORBIDDEN) cStatus = "Forbidden";
    else if (nStatus == HTTP_NOT_FOUND) cStatus = "Not Found";
    else if (nStatus == HTTP_INTERNAL_SERVER_ERROR) cStatus = "Internal Server Error";
    else cStatus = "";
    return strprintf(
            "HTTP/1.1 %d %s\r\n"
            "Date: %s\r\n"
            "Connection: %s\r\n"
            "Content-Length: %"PRIszu"\r\n"
            "Content-Type: application/json\r\n"
            "Server: tmcoin-json-rpc/%s\r\n"
            "\r\n"
            "%s",
        nStatus,
        cStatus,
        rfc1123Time().c_str(),
        keepalive ? "keep-alive" : "close",
        strMsg.size(),
        FormatFullVersion().c_str(),
        strMsg.c_str());
}
// Parse the request line of an incoming HTTP request
// ("METHOD /uri HTTP/1.x"). Only GET and POST are accepted and the URI
// must be an absolute path. Returns false on malformed input; `proto`
// receives the HTTP/1.x minor version (0 when absent).
bool ReadHTTPRequestLine(std::basic_istream<char>& stream, int &proto,
                         string& http_method, string& http_uri)
{
    string str;
    getline(stream, str);
    // HTTP request line is space-delimited
    vector<string> vWords;
    boost::split(vWords, str, boost::is_any_of(" "));
    if (vWords.size() < 2)
        return false;
    // HTTP methods permitted: GET, POST
    http_method = vWords[0];
    if (http_method != "GET" && http_method != "POST")
        return false;
    // HTTP URI must be an absolute path, relative to current host
    http_uri = vWords[1];
    if (http_uri.size() == 0 || http_uri[0] != '/')
        return false;
    // parse proto, if present
    string strProto = "";
    if (vWords.size() > 2)
        strProto = vWords[2];
    proto = 0;
    const char *ver = strstr(strProto.c_str(), "HTTP/1.");
    if (ver != NULL)
        proto = atoi(ver+7);
    return true;
}
// Parse the status line of an HTTP response; returns the numeric status
// code (500 on malformed input) and reports the HTTP/1.x minor version
// through `proto`.
int ReadHTTPStatus(std::basic_istream<char>& stream, int &proto)
{
    string str;
    getline(stream, str);
    vector<string> vWords;
    boost::split(vWords, str, boost::is_any_of(" "));
    if (vWords.size() < 2)
        return HTTP_INTERNAL_SERVER_ERROR;
    proto = 0;
    const char *ver = strstr(str.c_str(), "HTTP/1.");
    if (ver != NULL)
        proto = atoi(ver+7);
    return atoi(vWords[1].c_str());
}
// Read HTTP headers until the blank line that terminates them. Header
// names are lower-cased into mapHeadersRet; returns the parsed
// Content-Length value (0 when the header is absent).
int ReadHTTPHeaders(std::basic_istream<char>& stream, map<string, string>& mapHeadersRet)
{
    int nLen = 0;
    loop   // project macro: infinite loop, exited by the break below
    {
        string str;
        std::getline(stream, str);
        if (str.empty() || str == "\r")
            break;
        string::size_type nColon = str.find(":");
        if (nColon != string::npos)
        {
            string strHeader = str.substr(0, nColon);
            boost::trim(strHeader);
            boost::to_lower(strHeader);
            string strValue = str.substr(nColon+1);
            boost::trim(strValue);
            mapHeadersRet[strHeader] = strValue;
            if (strHeader == "content-length")
                nLen = atoi(strValue.c_str());
        }
    }
    return nLen;
}
// Read the headers and body of an HTTP message. Bodies larger than
// MAX_SIZE are rejected with 500. Also normalizes the Connection header:
// when the peer did not specify one, defaults to keep-alive for
// HTTP/1.1+ and close for HTTP/1.0.
int ReadHTTPMessage(std::basic_istream<char>& stream, map<string,
                    string>& mapHeadersRet, string& strMessageRet,
                    int nProto)
{
    mapHeadersRet.clear();
    strMessageRet = "";
    // Read header
    int nLen = ReadHTTPHeaders(stream, mapHeadersRet);
    if (nLen < 0 || nLen > (int)MAX_SIZE)
        return HTTP_INTERNAL_SERVER_ERROR;
    // Read message
    if (nLen > 0)
    {
        vector<char> vch(nLen);
        stream.read(&vch[0], nLen);
        strMessageRet = string(vch.begin(), vch.end());
    }
    string sConHdr = mapHeadersRet["connection"];
    if ((sConHdr != "close") && (sConHdr != "keep-alive"))
    {
        if (nProto >= 1)
            mapHeadersRet["connection"] = "keep-alive";
        else
            mapHeadersRet["connection"] = "close";
    }
    return HTTP_OK;
}
// Check the HTTP Basic credentials in the Authorization header against
// the configured rpcuser:rpcpassword. Uses a timing-resistant comparison
// so attackers cannot learn how many leading characters matched.
bool HTTPAuthorized(map<string, string>& mapHeaders)
{
    string strAuth = mapHeaders["authorization"];
    if (strAuth.substr(0,6) != "Basic ")
        return false;
    string strUserPass64 = strAuth.substr(6); boost::trim(strUserPass64);
    string strUserPass = DecodeBase64(strUserPass64);
    return TimingResistantEqual(strUserPass, strRPCUserColonPass);
}
//
// JSON-RPC protocol. Bitcoin speaks version 1.0 for maximum compatibility,
// but uses JSON-RPC 1.1/2.0 standards for parts of the 1.0 standard that were
// unspecified (HTTP errors and contents of 'error').
//
// 1.0 spec: http://json-rpc.org/wiki/specification
// 1.2 spec: http://groups.google.com/group/json-rpc/web/json-rpc-over-http
// http://www.codeproject.com/KB/recipes/JSON_Spirit.aspx
//
// Serialize a client-side JSON-RPC request (method/params/id), newline-terminated.
string JSONRPCRequest(const string& strMethod, const Array& params, const Value& id)
{
    Object request;
    request.push_back(Pair("method", strMethod));
    request.push_back(Pair("params", params));
    request.push_back(Pair("id", id));
    return write_string(Value(request), false) + "\n";
}
// Assemble a JSON-RPC reply object. Per the spec, "result" must be null
// whenever an error is being reported.
Object JSONRPCReplyObj(const Value& result, const Value& error, const Value& id)
{
    Object reply;
    reply.push_back(Pair("result", error.type() == null_type ? result : Value::null));
    reply.push_back(Pair("error", error));
    reply.push_back(Pair("id", id));
    return reply;
}
// Serialize a full JSON-RPC reply, newline-terminated, ready for the socket.
string JSONRPCReply(const Value& result, const Value& error, const Value& id)
{
    return write_string(Value(JSONRPCReplyObj(result, error, id)), false) + "\n";
}
// Write a JSON-RPC error response to `stream`, mapping well-known RPC
// error codes onto HTTP statuses (invalid request -> 400, method not
// found -> 404, everything else -> 500).
void ErrorReply(std::ostream& stream, const Object& objError, const Value& id)
{
    // Send error reply from json-rpc error object
    int nStatus = HTTP_INTERNAL_SERVER_ERROR;
    int code = find_value(objError, "code").get_int();
    if (code == RPC_INVALID_REQUEST) nStatus = HTTP_BAD_REQUEST;
    else if (code == RPC_METHOD_NOT_FOUND) nStatus = HTTP_NOT_FOUND;
    string strReply = JSONRPCReply(Value::null, objError, id);
    stream << HTTPReply(nStatus, strReply, false) << std::flush;
}
// Decide whether an RPC connection from `address` is permitted: loopback
// is always allowed; anything else must match one of the -rpcallowip
// entries (which may contain wildcards).
bool ClientAllowed(const boost::asio::ip::address& address)
{
    // Make sure that IPv4-compatible and IPv4-mapped IPv6 addresses are treated as IPv4 addresses
    if (address.is_v6()
        && (address.to_v6().is_v4_compatible()
            || address.to_v6().is_v4_mapped()))
        return ClientAllowed(address.to_v6().to_v4());
    if (address == asio::ip::address_v4::loopback()
        || address == asio::ip::address_v6::loopback()
        || (address.is_v4()
            // Check whether IPv4 addresses match 127.0.0.0/8 (loopback subnet)
            && (address.to_v4().to_ulong() & 0xff000000) == 0x7f000000))
        return true;
    const string strAddress = address.to_string();
    const vector<string>& vAllow = mapMultiArgs["-rpcallowip"];
    BOOST_FOREACH(string strAllow, vAllow)
        if (WildcardMatch(strAddress, strAllow))
            return true;
    return false;
}
//
// IOStream device that speaks SSL but can also speak non-SSL
//
// Boost.Iostreams bidirectional device wrapping an asio SSL stream.
// The TLS handshake is performed lazily on the first read (server role)
// or first write (client role); in non-SSL mode all I/O bypasses the TLS
// layer and goes straight to the underlying socket (next_layer()).
template <typename Protocol>
class SSLIOStreamDevice : public iostreams::device<iostreams::bidirectional> {
public:
    SSLIOStreamDevice(asio::ssl::stream<typename Protocol::socket> &streamIn, bool fUseSSLIn) : stream(streamIn)
    {
        fUseSSL = fUseSSLIn;
        fNeedHandshake = fUseSSLIn;
    }
    // Perform the TLS handshake at most once; no-op when not needed.
    void handshake(ssl::stream_base::handshake_type role)
    {
        if (!fNeedHandshake) return;
        fNeedHandshake = false;
        stream.handshake(role);
    }
    std::streamsize read(char* s, std::streamsize n)
    {
        handshake(ssl::stream_base::server); // HTTPS servers read first
        if (fUseSSL) return stream.read_some(asio::buffer(s, n));
        return stream.next_layer().read_some(asio::buffer(s, n));
    }
    std::streamsize write(const char* s, std::streamsize n)
    {
        handshake(ssl::stream_base::client); // HTTPS clients write first
        if (fUseSSL) return asio::write(stream, asio::buffer(s, n));
        return asio::write(stream.next_layer(), asio::buffer(s, n));
    }
    // Resolve `server:port` and try each endpoint until one connects.
    bool connect(const std::string& server, const std::string& port)
    {
        ip::tcp::resolver resolver(stream.get_io_service());
        ip::tcp::resolver::query query(server.c_str(), port.c_str());
        ip::tcp::resolver::iterator endpoint_iterator = resolver.resolve(query);
        ip::tcp::resolver::iterator end;
        boost::system::error_code error = asio::error::host_not_found;
        while (error && endpoint_iterator != end)
        {
            stream.lowest_layer().close();
            stream.lowest_layer().connect(*endpoint_iterator++, error);
        }
        if (error)
            return false;
        return true;
    }
private:
    bool fNeedHandshake;
    bool fUseSSL;
    asio::ssl::stream<typename Protocol::socket>& stream;
};
// Abstract interface for an accepted RPC client connection, hiding
// whether the underlying transport is SSL or plain TCP.
class AcceptedConnection
{
public:
    virtual ~AcceptedConnection() {}

    // Bidirectional stream for reading the request / writing the reply.
    virtual std::iostream& stream() = 0;
    // Remote peer address, used for logging.
    virtual std::string peer_address_to_string() const = 0;
    virtual void close() = 0;
};
// Concrete AcceptedConnection for a given protocol (e.g. ip::tcp): wraps
// an asio SSL stream in an SSLIOStreamDevice-backed iostream.
template <typename Protocol>
class AcceptedConnectionImpl : public AcceptedConnection
{
public:
    AcceptedConnectionImpl(
            asio::io_service& io_service,
            ssl::context &context,
            bool fUseSSL) :
        sslStream(io_service, context),
        _d(sslStream, fUseSSL),
        _stream(_d)
    {
    }

    virtual std::iostream& stream()
    {
        return _stream;
    }

    virtual std::string peer_address_to_string() const
    {
        return peer.address().to_string();
    }

    virtual void close()
    {
        _stream.close();
    }

    // Filled in by the acceptor when the connection is established.
    typename Protocol::endpoint peer;
    asio::ssl::stream<typename Protocol::socket> sslStream;

private:
    SSLIOStreamDevice<Protocol> _d;                           // SSL/plain adapter
    iostreams::stream< SSLIOStreamDevice<Protocol> > _stream; // iostream facade over _d
};
// Handles all requests arriving on an accepted connection until it closes
// (defined further below, after the accept machinery).
void ServiceConnection(AcceptedConnection *conn);

// Forward declaration required for RPCListen
template <typename Protocol, typename SocketAcceptorService>
static void RPCAcceptHandler(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                             ssl::context& context,
                             bool fUseSSL,
                             AcceptedConnection* conn,
                             const boost::system::error_code& error);
/**
 * Sets up I/O resources to accept and handle a new connection.
 *
 * Allocates an AcceptedConnectionImpl and starts an async accept on it.
 * Ownership of the connection object passes to RPCAcceptHandler, which is
 * invoked when the accept completes (or fails).
 */
template <typename Protocol, typename SocketAcceptorService>
static void RPCListen(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                      ssl::context& context,
                      const bool fUseSSL)
{
    // Accept connection
    AcceptedConnectionImpl<Protocol>* conn = new AcceptedConnectionImpl<Protocol>(acceptor->get_io_service(), context, fUseSSL);

    acceptor->async_accept(
            conn->sslStream.lowest_layer(),
            conn->peer,
            boost::bind(&RPCAcceptHandler<Protocol, SocketAcceptorService>,
                acceptor,
                boost::ref(context),
                fUseSSL,
                conn,
                boost::asio::placeholders::error));
}
/**
 * Accept and handle incoming connection.
 *
 * Takes ownership of `conn` (it is deleted on every path) and immediately
 * re-arms the accept loop so further clients can connect while this one
 * is serviced.
 */
template <typename Protocol, typename SocketAcceptorService>
static void RPCAcceptHandler(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                             ssl::context& context,
                             const bool fUseSSL,
                             AcceptedConnection* conn,
                             const boost::system::error_code& error)
{
    // Immediately start accepting new connections, except when we're cancelled or our socket is closed.
    if (error != asio::error::operation_aborted && acceptor->is_open())
        RPCListen(acceptor, context, fUseSSL);

    // Downcast to reach the peer endpoint for the IP allow-list check.
    AcceptedConnectionImpl<ip::tcp>* tcp_conn = dynamic_cast< AcceptedConnectionImpl<ip::tcp>* >(conn);

    // TODO: Actually handle errors
    if (error)
    {
        delete conn;
    }
    // Restrict callers by IP. It is important to
    // do this before starting client thread, to filter out
    // certain DoS and misbehaving clients.
    else if (tcp_conn && !ClientAllowed(tcp_conn->peer.address()))
    {
        // Only send a 403 if we're not using SSL to prevent a DoS during the SSL handshake.
        if (!fUseSSL)
            conn->stream() << HTTPReply(HTTP_FORBIDDEN, "", false) << std::flush;
        delete conn;
    }
    else {
        // Service the request synchronously on this worker thread.
        ServiceConnection(conn);
        conn->close();
        delete conn;
    }
}
// Initialise and start the RPC server: validates credentials, configures
// optional SSL, binds the listening socket(s) (IPv6 dual-stack with IPv4
// fallback) and spawns the worker thread pool.
void StartRPCThreads()
{
    strRPCUserColonPass = mapArgs["-rpcuser"] + ":" + mapArgs["-rpcpassword"];
    // Refuse to serve with an empty password or user == password: show the
    // operator a suggested random password and begin shutdown.
    if ((mapArgs["-rpcpassword"] == "") ||
        (mapArgs["-rpcuser"] == mapArgs["-rpcpassword"]))
    {
        unsigned char rand_pwd[32];
        RAND_bytes(rand_pwd, 32);
        string strWhatAmI = "To use tmcoind";
        if (mapArgs.count("-server"))
            strWhatAmI = strprintf(_("To use the %s option"), "\"-server\"");
        else if (mapArgs.count("-daemon"))
            strWhatAmI = strprintf(_("To use the %s option"), "\"-daemon\"");
        uiInterface.ThreadSafeMessageBox(strprintf(
            _("%s, you must set a rpcpassword in the configuration file:\n"
            "%s\n"
            "It is recommended you use the following random password:\n"
            "rpcuser=tmcoinrpc\n"
            "rpcpassword=%s\n"
            "(you do not need to remember this password)\n"
            "The username and password MUST NOT be the same.\n"
            "If the file does not exist, create it with owner-readable-only file permissions.\n"
            "It is also recommended to set alertnotify so you are notified of problems;\n"
            "for example: alertnotify=echo %%s | mail -s \"TMCoin Alert\" admin@foo.com\n"),
                strWhatAmI.c_str(),
                GetConfigFile().string().c_str(),
                EncodeBase58(&rand_pwd[0],&rand_pwd[0]+32).c_str()),
            "", CClientUIInterface::MSG_ERROR);
        StartShutdown();
        return;
    }

    assert(rpc_io_service == NULL);
    rpc_io_service = new asio::io_service();
    rpc_ssl_context = new ssl::context(*rpc_io_service, ssl::context::sslv23);

    const bool fUseSSL = GetBoolArg("-rpcssl");

    if (fUseSSL)
    {
        // Load certificate chain and private key, then restrict ciphers.
        rpc_ssl_context->set_options(ssl::context::no_sslv2);

        filesystem::path pathCertFile(GetArg("-rpcsslcertificatechainfile", "server.cert"));
        if (!pathCertFile.is_complete()) pathCertFile = filesystem::path(GetDataDir()) / pathCertFile;
        if (filesystem::exists(pathCertFile)) rpc_ssl_context->use_certificate_chain_file(pathCertFile.string());
        else printf("ThreadRPCServer ERROR: missing server certificate file %s\n", pathCertFile.string().c_str());

        filesystem::path pathPKFile(GetArg("-rpcsslprivatekeyfile", "server.pem"));
        if (!pathPKFile.is_complete()) pathPKFile = filesystem::path(GetDataDir()) / pathPKFile;
        if (filesystem::exists(pathPKFile)) rpc_ssl_context->use_private_key_file(pathPKFile.string(), ssl::context::pem);
        else printf("ThreadRPCServer ERROR: missing server private key file %s\n", pathPKFile.string().c_str());

        string strCiphers = GetArg("-rpcsslciphers", "TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH");
        SSL_CTX_set_cipher_list(rpc_ssl_context->impl(), strCiphers.c_str());
    }

    // Try a dual IPv6/IPv4 socket, falling back to separate IPv4 and IPv6 sockets
    // Without -rpcallowip we only ever bind loopback interfaces.
    const bool loopback = !mapArgs.count("-rpcallowip");
    asio::ip::address bindAddress = loopback ? asio::ip::address_v6::loopback() : asio::ip::address_v6::any();
    ip::tcp::endpoint endpoint(bindAddress, GetArg("-rpcport", GetDefaultRPCPort()));
    boost::system::error_code v6_only_error;
    boost::shared_ptr<ip::tcp::acceptor> acceptor(new ip::tcp::acceptor(*rpc_io_service));

    bool fListening = false;
    std::string strerr;
    try
    {
        acceptor->open(endpoint.protocol());
        acceptor->set_option(boost::asio::ip::tcp::acceptor::reuse_address(true));

        // Try making the socket dual IPv6/IPv4 (if listening on the "any" address)
        acceptor->set_option(boost::asio::ip::v6_only(loopback), v6_only_error);

        acceptor->bind(endpoint);
        acceptor->listen(socket_base::max_connections);

        RPCListen(acceptor, *rpc_ssl_context, fUseSSL);
        fListening = true;
    }
    catch(boost::system::system_error &e)
    {
        strerr = strprintf(_("An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s"), endpoint.port(), e.what());
    }

    try {
        // If dual IPv6/IPv4 failed (or we're opening loopback interfaces only), open IPv4 separately
        if (!fListening || loopback || v6_only_error)
        {
            bindAddress = loopback ? asio::ip::address_v4::loopback() : asio::ip::address_v4::any();
            endpoint.address(bindAddress);

            acceptor.reset(new ip::tcp::acceptor(*rpc_io_service));
            acceptor->open(endpoint.protocol());
            acceptor->set_option(boost::asio::ip::tcp::acceptor::reuse_address(true));
            acceptor->bind(endpoint);
            acceptor->listen(socket_base::max_connections);

            RPCListen(acceptor, *rpc_ssl_context, fUseSSL);
            fListening = true;
        }
    }
    catch(boost::system::system_error &e)
    {
        strerr = strprintf(_("An error occurred while setting up the RPC port %u for listening on IPv4: %s"), endpoint.port(), e.what());
    }

    if (!fListening) {
        // Could not bind any socket: report the last error and shut down.
        uiInterface.ThreadSafeMessageBox(strerr, "", CClientUIInterface::MSG_ERROR);
        StartShutdown();
        return;
    }

    // One shared io_service served by -rpcthreads worker threads (default 4).
    rpc_worker_group = new boost::thread_group();
    for (int i = 0; i < GetArg("-rpcthreads", 4); i++)
        rpc_worker_group->create_thread(boost::bind(&asio::io_service::run, rpc_io_service));
}
// Stop the RPC server started by StartRPCThreads(); safe to call when it
// was never started.
void StopRPCThreads()
{
    if (rpc_io_service == NULL) return;

    // Stop the event loop, then wait for all worker threads to finish
    // before tearing the objects down in reverse order of creation.
    rpc_io_service->stop();
    rpc_worker_group->join_all();
    delete rpc_worker_group; rpc_worker_group = NULL;
    delete rpc_ssl_context; rpc_ssl_context = NULL;
    delete rpc_io_service; rpc_io_service = NULL;
}
// A single parsed JSON-RPC request: id, method name and parameter array.
class JSONRequest
{
public:
    Value id;         // request id, echoed back in the reply
    string strMethod; // RPC method name
    Array params;     // positional parameters

    JSONRequest() { id = Value::null; }
    // Populate the fields from a raw JSON value; throws JSONRPCError
    // (RPC_INVALID_REQUEST) on malformed input.
    void parse(const Value& valRequest);
};
// Validate and unpack a raw JSON-RPC request object into this JSONRequest.
void JSONRequest::parse(const Value& valRequest)
{
    // Parse request
    if (valRequest.type() != obj_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Invalid Request object");
    const Object& request = valRequest.get_obj();

    // Parse id now so errors from here on will have the id
    id = find_value(request, "id");

    // Parse method
    Value valMethod = find_value(request, "method");
    if (valMethod.type() == null_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Missing method");
    if (valMethod.type() != str_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Method must be a string");
    strMethod = valMethod.get_str();
    // Skip logging for high-frequency mining polls.
    if (strMethod != "getwork" && strMethod != "getworkex" && strMethod != "getblocktemplate")
        printf("ThreadRPCServer method=%s\n", strMethod.c_str());

    // Parse params
    Value valParams = find_value(request, "params");
    if (valParams.type() == array_type)
        params = valParams.get_array();
    else if (valParams.type() == null_type)
        // A missing/null params field is treated as an empty list.
        params = Array();
    else
        throw JSONRPCError(RPC_INVALID_REQUEST, "Params must be an array");
}
// Execute one request from a batch, converting any failure into a JSON-RPC
// error reply object rather than letting it escape: a batch must always
// produce exactly one reply per request.
static Object JSONRPCExecOne(const Value& req)
{
    Object rpc_result;

    JSONRequest jreq;
    try {
        jreq.parse(req);

        Value result = tableRPC.execute(jreq.strMethod, jreq.params);
        rpc_result = JSONRPCReplyObj(result, Value::null, jreq.id);
    }
    catch (Object& objError)
    {
        rpc_result = JSONRPCReplyObj(Value::null, objError, jreq.id);
    }
    catch (std::exception& e)
    {
        rpc_result = JSONRPCReplyObj(Value::null,
                                     JSONRPCError(RPC_PARSE_ERROR, e.what()), jreq.id);
    }

    return rpc_result;
}
// Execute every request in a JSON-RPC batch, in order, and serialize the
// array of replies. Clients expect a trailing newline after the JSON.
static string JSONRPCExecBatch(const Array& vReq)
{
    Array vResults;
    for (Array::size_type nIdx = 0; nIdx < vReq.size(); nIdx++)
        vResults.push_back(JSONRPCExecOne(vReq.at(nIdx)));

    return write_string(Value(vResults), false) + "\n";
}
// Serve HTTP/JSON-RPC requests on an accepted connection until the client
// asks to close (Connection: close), an error occurs, or authorization
// fails. Runs synchronously on an RPC worker thread.
void ServiceConnection(AcceptedConnection *conn)
{
    bool fRun = true; // keep-alive loop flag; cleared on "Connection: close"
    while (fRun)
    {
        int nProto = 0;
        map<string, string> mapHeaders;
        string strRequest, strMethod, strURI;

        // Read HTTP request line
        if (!ReadHTTPRequestLine(conn->stream(), nProto, strMethod, strURI))
            break;

        // Read HTTP message headers and body
        ReadHTTPMessage(conn->stream(), mapHeaders, strRequest, nProto);

        // Only the root URI is served.
        if (strURI != "/") {
            conn->stream() << HTTPReply(HTTP_NOT_FOUND, "", false) << std::flush;
            break;
        }

        // Check authorization
        if (mapHeaders.count("authorization") == 0)
        {
            conn->stream() << HTTPReply(HTTP_UNAUTHORIZED, "", false) << std::flush;
            break;
        }
        if (!HTTPAuthorized(mapHeaders))
        {
            printf("ThreadRPCServer incorrect password attempt from %s\n", conn->peer_address_to_string().c_str());
            /* Deter brute-forcing short passwords.
            If this results in a DOS the user really
            shouldn't have their RPC port exposed.*/
            if (mapArgs["-rpcpassword"].size() < 20)
                MilliSleep(250);

            conn->stream() << HTTPReply(HTTP_UNAUTHORIZED, "", false) << std::flush;
            break;
        }
        if (mapHeaders["connection"] == "close")
            fRun = false;

        JSONRequest jreq;
        try
        {
            // Parse request
            Value valRequest;
            if (!read_string(strRequest, valRequest))
                throw JSONRPCError(RPC_PARSE_ERROR, "Parse error");

            string strReply;

            // singleton request
            if (valRequest.type() == obj_type) {
                jreq.parse(valRequest);

                Value result = tableRPC.execute(jreq.strMethod, jreq.params);

                // Send reply
                strReply = JSONRPCReply(result, Value::null, jreq.id);

            // array of requests
            } else if (valRequest.type() == array_type)
                strReply = JSONRPCExecBatch(valRequest.get_array());
            else
                throw JSONRPCError(RPC_PARSE_ERROR, "Top-level object parse error");

            conn->stream() << HTTPReply(HTTP_OK, strReply, fRun) << std::flush;
        }
        catch (Object& objError)
        {
            // JSONRPCError throws json_spirit Objects; report and drop the
            // connection.
            ErrorReply(conn->stream(), objError, jreq.id);
            break;
        }
        catch (std::exception& e)
        {
            ErrorReply(conn->stream(), JSONRPCError(RPC_PARSE_ERROR, e.what()), jreq.id);
            break;
        }
    }
}
// Dispatch an RPC method by name, enforcing wallet availability, safe mode
// and the command's locking requirements.
//
// Fix: the params parameter had been corrupted by HTML-entity decoding
// ("&para" -> '¶'), producing "Array ¶ms"; restored to "Array &params".
//
// Throws a JSONRPCError Object on unknown/forbidden methods, and wraps any
// C++ exception from the handler in RPC_MISC_ERROR.
json_spirit::Value CRPCTable::execute(const std::string &strMethod, const json_spirit::Array &params) const
{
    // Find method
    const CRPCCommand *pcmd = tableRPC[strMethod];
    if (!pcmd)
        throw JSONRPCError(RPC_METHOD_NOT_FOUND, "Method not found");
    // Wallet-requiring commands are unavailable when no wallet is loaded.
    if (pcmd->reqWallet && !pwalletMain)
        throw JSONRPCError(RPC_METHOD_NOT_FOUND, "Method not found (disabled)");

    // Observe safe mode
    string strWarning = GetWarnings("rpc");
    if (strWarning != "" && !GetBoolArg("-disablesafemode") &&
        !pcmd->okSafeMode)
        throw JSONRPCError(RPC_FORBIDDEN_BY_SAFE_MODE, string("Safe mode: ") + strWarning);

    try
    {
        // Execute
        Value result;
        {
            // Thread-safe commands run lock-free; otherwise take cs_main
            // (plus the wallet lock when a wallet is loaded).
            if (pcmd->threadSafe)
                result = pcmd->actor(params, false);
            else if (!pwalletMain) {
                LOCK(cs_main);
                result = pcmd->actor(params, false);
            } else {
                LOCK2(cs_main, pwalletMain->cs_wallet);
                result = pcmd->actor(params, false);
            }
        }
        return result;
    }
    catch (std::exception& e)
    {
        throw JSONRPCError(RPC_MISC_ERROR, e.what());
    }
}
// Client side: connect to a locally running daemon and invoke one JSON-RPC
// method, returning the parsed reply object. Throws std::runtime_error on
// connection, authorization, HTTP or parse failures.
Object CallRPC(const string& strMethod, const Array& params)
{
    if (mapArgs["-rpcuser"] == "" && mapArgs["-rpcpassword"] == "")
        throw runtime_error(strprintf(
            _("You must set rpcpassword=<password> in the configuration file:\n%s\n"
              "If the file does not exist, create it with owner-readable-only file permissions."),
                GetConfigFile().string().c_str()));

    // Connect to localhost
    bool fUseSSL = GetBoolArg("-rpcssl");
    asio::io_service io_service;
    ssl::context context(io_service, ssl::context::sslv23);
    context.set_options(ssl::context::no_sslv2);
    asio::ssl::stream<asio::ip::tcp::socket> sslStream(io_service, context);
    SSLIOStreamDevice<asio::ip::tcp> d(sslStream, fUseSSL);
    iostreams::stream< SSLIOStreamDevice<asio::ip::tcp> > stream(d);
    if (!d.connect(GetArg("-rpcconnect", "127.0.0.1"), GetArg("-rpcport", itostr(GetDefaultRPCPort()))))
        throw runtime_error("couldn't connect to server");

    // HTTP basic authentication
    string strUserPass64 = EncodeBase64(mapArgs["-rpcuser"] + ":" + mapArgs["-rpcpassword"]);
    map<string, string> mapRequestHeaders;
    mapRequestHeaders["Authorization"] = string("Basic ") + strUserPass64;

    // Send request
    string strRequest = JSONRPCRequest(strMethod, params, 1);
    string strPost = HTTPPost(strRequest, mapRequestHeaders);
    stream << strPost << std::flush;

    // Receive HTTP reply status
    int nProto = 0;
    int nStatus = ReadHTTPStatus(stream, nProto);

    // Receive HTTP reply message headers and body
    map<string, string> mapHeaders;
    string strReply;
    ReadHTTPMessage(stream, mapHeaders, strReply, nProto);

    // 400/404/500 may still carry a JSON error body, so only the other
    // HTTP errors are fatal at this level.
    if (nStatus == HTTP_UNAUTHORIZED)
        throw runtime_error("incorrect rpcuser or rpcpassword (authorization failed)");
    else if (nStatus >= 400 && nStatus != HTTP_BAD_REQUEST && nStatus != HTTP_NOT_FOUND && nStatus != HTTP_INTERNAL_SERVER_ERROR)
        throw runtime_error(strprintf("server returned HTTP error %d", nStatus));
    else if (strReply.empty())
        throw runtime_error("no response from server");

    // Parse reply
    Value valReply;
    if (!read_string(strReply, valReply))
        throw runtime_error("couldn't parse reply from server");
    const Object& reply = valReply.get_obj();
    if (reply.empty())
        throw runtime_error("expected reply to have result, error and id properties");

    return reply;
}
// Coerce a json_spirit Value to type T in place. String values are first
// re-parsed as unquoted JSON (so the command-line string "1" becomes the
// integer 1). With fAllowNull, null values pass through untouched.
// Throws std::runtime_error on unparseable strings.
template<typename T>
void ConvertTo(Value& value, bool fAllowNull=false)
{
    if (fAllowNull && value.type() == null_type)
        return;
    if (value.type() == str_type)
    {
        // reinterpret string as unquoted json value
        Value value2;
        string strJSON = value.get_str();
        if (!read_string(strJSON, value2))
            throw runtime_error(string("Error parsing JSON:")+strJSON);
        // Recurse in case the parsed value is itself a string.
        ConvertTo<T>(value2, fAllowNull);
        value = value2;
    }
    else
    {
        value = value.get_value<T>();
    }
}
// Convert strings to command-specific RPC representation
// Every parameter arrives from the command line as a string; this table
// re-types the positions that each method expects as non-strings (n is
// the number of supplied parameters, so "n > k" guards position k).
Array RPCConvertValues(const std::string &strMethod, const std::vector<std::string> &strParams)
{
    Array params;
    BOOST_FOREACH(const std::string &param, strParams)
        params.push_back(param);

    int n = params.size();

    //
    // Special case non-string parameter types
    //
    if (strMethod == "stop" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "getaddednodeinfo" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "setgenerate" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "setgenerate" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getnetworkhashps" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "getnetworkhashps" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "sendtoaddress" && n > 1) ConvertTo<double>(params[1]);
    if (strMethod == "settxfee" && n > 0) ConvertTo<double>(params[0]);
    if (strMethod == "setmininput" && n > 0) ConvertTo<double>(params[0]);
    if (strMethod == "getreceivedbyaddress" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getreceivedbyaccount" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listreceivedbyaddress" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listreceivedbyaddress" && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "listreceivedbyaccount" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listreceivedbyaccount" && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "getbalance" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getblockhash" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "move" && n > 2) ConvertTo<double>(params[2]);
    if (strMethod == "move" && n > 3) ConvertTo<boost::int64_t>(params[3]);
    if (strMethod == "sendfrom" && n > 2) ConvertTo<double>(params[2]);
    if (strMethod == "sendfrom" && n > 3) ConvertTo<boost::int64_t>(params[3]);
    if (strMethod == "listtransactions" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listtransactions" && n > 2) ConvertTo<boost::int64_t>(params[2]);
    if (strMethod == "listaccounts" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "walletpassphrase" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getblocktemplate" && n > 0) ConvertTo<Object>(params[0]);
    if (strMethod == "listsinceblock" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "sendmany" && n > 1) ConvertTo<Object>(params[1]);
    if (strMethod == "sendmany" && n > 2) ConvertTo<boost::int64_t>(params[2]);
    if (strMethod == "addmultisigaddress" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "addmultisigaddress" && n > 1) ConvertTo<Array>(params[1]);
    if (strMethod == "createmultisig" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "createmultisig" && n > 1) ConvertTo<Array>(params[1]);
    if (strMethod == "listunspent" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listunspent" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listunspent" && n > 2) ConvertTo<Array>(params[2]);
    if (strMethod == "getblock" && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "getrawtransaction" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "createrawtransaction" && n > 0) ConvertTo<Array>(params[0]);
    if (strMethod == "createrawtransaction" && n > 1) ConvertTo<Object>(params[1]);
    if (strMethod == "signrawtransaction" && n > 1) ConvertTo<Array>(params[1], true);
    if (strMethod == "signrawtransaction" && n > 2) ConvertTo<Array>(params[2], true);
    if (strMethod == "gettxout" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "gettxout" && n > 2) ConvertTo<bool>(params[2]);
    if (strMethod == "lockunspent" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "lockunspent" && n > 1) ConvertTo<Array>(params[1]);
    if (strMethod == "importprivkey" && n > 2) ConvertTo<bool>(params[2]);
    if (strMethod == "verifychain" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "verifychain" && n > 1) ConvertTo<boost::int64_t>(params[1]);

    return params;
}
// Entry point for the command-line RPC client: parses argv into a method
// name plus JSON-typed parameters, calls the server, and prints the result
// to stdout (or the error to stderr).
// Returns 0 on success, abs(RPC error code) on server error, 87 on local error.
int CommandLineRPC(int argc, char *argv[])
{
    string strPrint;
    int nRet = 0;
    try
    {
        // Skip switches
        while (argc > 1 && IsSwitchChar(argv[1][0]))
        {
            argc--;
            argv++;
        }

        // Method
        if (argc < 2)
            throw runtime_error("too few parameters");
        string strMethod = argv[1];

        // Parameters default to strings
        std::vector<std::string> strParams(&argv[2], &argv[argc]);
        Array params = RPCConvertValues(strMethod, strParams);

        // Execute
        Object reply = CallRPC(strMethod, params);

        // Parse reply
        const Value& result = find_value(reply, "result");
        const Value& error = find_value(reply, "error");

        if (error.type() != null_type)
        {
            // Error
            strPrint = "error: " + write_string(error, false);
            int code = find_value(error.get_obj(), "code").get_int();
            nRet = abs(code);
        }
        else
        {
            // Result
            if (result.type() == null_type)
                strPrint = "";
            else if (result.type() == str_type)
                strPrint = result.get_str();
            else
                strPrint = write_string(result, true);
        }
    }
    catch (boost::thread_interrupted) {
        // Let thread interruption propagate for clean shutdown.
        throw;
    }
    catch (std::exception& e) {
        strPrint = string("error: ") + e.what();
        nRet = 87;
    }
    catch (...) {
        PrintException(NULL, "CommandLineRPC()");
    }

    if (strPrint != "")
    {
        fprintf((nRet == 0 ? stdout : stderr), "%s\n", strPrint.c_str());
    }
    return nRet;
}
#ifdef TEST
// Stand-alone test harness: run with "-server" to act as the RPC server,
// otherwise behave like the command-line client.
int main(int argc, char *argv[])
{
#ifdef _MSC_VER
    // Turn off Microsoft heap dump noise
    _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE);
    _CrtSetReportFile(_CRT_WARN, CreateFile("NUL", GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, 0));
#endif
    // Unbuffered stdio so output interleaves predictably under test.
    setbuf(stdin, NULL);
    setbuf(stdout, NULL);
    setbuf(stderr, NULL);

    try
    {
        if (argc >= 2 && string(argv[1]) == "-server")
        {
            printf("server ready\n");
            ThreadRPCServer(NULL);
        }
        else
        {
            return CommandLineRPC(argc, argv);
        }
    }
    catch (boost::thread_interrupted) {
        throw;
    }
    catch (std::exception& e) {
        PrintException(&e, "main()");
    } catch (...) {
        PrintException(NULL, "main()");
    }
    return 0;
}
#endif
// Global dispatch table mapping RPC method names to their handlers.
const CRPCTable tableRPC;
| w295472444/TMCOIN | src/bitcoinrpc.cpp | C++ | mit | 48,637 |
/**
 * @module {Module} utils/math
 * @parent utils
 *
 * The module's description is the first paragraph.
 *
 * The body of the module's documentation.
 */
import _ from 'lodash';
// NOTE(review): this module appears to be a DocumentJS parsing fixture —
// the `{ ... }` bodies are placeholders, not runnable code. Only line
// comments are added here so the parsed /** ... */ doc blocks and their
// expected output stay untouched. TODO confirm against the fixture's test.
/**
 * @function
 *
 * This function's description is the first
 * paragraph.
 *
 * This starts the body. This text comes after the signature.
 *
 * @param {Number} first This param's description.
 * @param {Number} second This param's description.
 * @return {Number} This return value's description.
 */
export function sum(first, second){ ... };
/**
 * @property {{}}
 *
 * This function's description is the first
 * paragraph.
 *
 * @option {Number} pi The description of pi.
 *
 * @option {Number} e The description of e.
 */
export var constants = {
  pi: 3.14159265359,
  e: 2.71828
};
<?php
/* WebProfilerBundle:Collector:router.html.twig */
// NOTE(review): auto-generated Twig template cache (compiled by the Twig
// compiler from the template named above). Do not edit by hand — clear the
// Symfony cache to regenerate. Only comments have been added here.
class __TwigTemplate_c8d21550850074782862265b813a9c2aea7c608253db98e24225c2ea859cc33f extends Twig_Template
{
public function __construct(Twig_Environment $env)
{
parent::__construct($env);
// line 1
// Template extends the profiler layout and overrides three blocks.
$this->parent = $this->loadTemplate("@WebProfiler/Profiler/layout.html.twig", "WebProfilerBundle:Collector:router.html.twig", 1);
$this->blocks = array(
'toolbar' => array($this, 'block_toolbar'),
'menu' => array($this, 'block_menu'),
'panel' => array($this, 'block_panel'),
);
}
protected function doGetParent(array $context)
{
return "@WebProfiler/Profiler/layout.html.twig";
}
protected function doDisplay(array $context, array $blocks = array())
{
// Profiler instrumentation: record enter/leave around rendering, then
// delegate the actual output to the parent layout with our blocks merged.
$__internal_4f799fe0cc7f22495efc73fba23694e0a3a0583a5214948f3c58038a44fe2573 = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_4f799fe0cc7f22495efc73fba23694e0a3a0583a5214948f3c58038a44fe2573->enter($__internal_4f799fe0cc7f22495efc73fba23694e0a3a0583a5214948f3c58038a44fe2573_prof = new Twig_Profiler_Profile($this->getTemplateName(), "template", "WebProfilerBundle:Collector:router.html.twig"));
$__internal_e2229cc8c004aedff67d2ce1c45f9efcfa69922e12b762676067fdd639b13361 = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_e2229cc8c004aedff67d2ce1c45f9efcfa69922e12b762676067fdd639b13361->enter($__internal_e2229cc8c004aedff67d2ce1c45f9efcfa69922e12b762676067fdd639b13361_prof = new Twig_Profiler_Profile($this->getTemplateName(), "template", "WebProfilerBundle:Collector:router.html.twig"));
$this->parent->display($context, array_merge($this->blocks, $blocks));
$__internal_4f799fe0cc7f22495efc73fba23694e0a3a0583a5214948f3c58038a44fe2573->leave($__internal_4f799fe0cc7f22495efc73fba23694e0a3a0583a5214948f3c58038a44fe2573_prof);
$__internal_e2229cc8c004aedff67d2ce1c45f9efcfa69922e12b762676067fdd639b13361->leave($__internal_e2229cc8c004aedff67d2ce1c45f9efcfa69922e12b762676067fdd639b13361_prof);
}
// line 3
// "toolbar" block is intentionally empty (template: {% block toolbar %}{% endblock %}).
public function block_toolbar($context, array $blocks = array())
{
$__internal_cb109454b38b7b24070cff9ccc466e56af2d95b49464b6409586a5d2d6a2c19a = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_cb109454b38b7b24070cff9ccc466e56af2d95b49464b6409586a5d2d6a2c19a->enter($__internal_cb109454b38b7b24070cff9ccc466e56af2d95b49464b6409586a5d2d6a2c19a_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "toolbar"));
$__internal_5f178640e8cca7dbb07a0d59d8a8fdfb7be7bfc9c63cb7423b245a520c7e632d = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_5f178640e8cca7dbb07a0d59d8a8fdfb7be7bfc9c63cb7423b245a520c7e632d->enter($__internal_5f178640e8cca7dbb07a0d59d8a8fdfb7be7bfc9c63cb7423b245a520c7e632d_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "toolbar"));
$__internal_5f178640e8cca7dbb07a0d59d8a8fdfb7be7bfc9c63cb7423b245a520c7e632d->leave($__internal_5f178640e8cca7dbb07a0d59d8a8fdfb7be7bfc9c63cb7423b245a520c7e632d_prof);
$__internal_cb109454b38b7b24070cff9ccc466e56af2d95b49464b6409586a5d2d6a2c19a->leave($__internal_cb109454b38b7b24070cff9ccc466e56af2d95b49464b6409586a5d2d6a2c19a_prof);
}
// line 5
// Renders the profiler sidebar menu entry (icon + "Routing" label).
public function block_menu($context, array $blocks = array())
{
$__internal_72ed7e1b4749995ea5e7260ecc524f48453edb598dbde0a3016fbc2e5b926b3d = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_72ed7e1b4749995ea5e7260ecc524f48453edb598dbde0a3016fbc2e5b926b3d->enter($__internal_72ed7e1b4749995ea5e7260ecc524f48453edb598dbde0a3016fbc2e5b926b3d_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "menu"));
$__internal_475683fdbeda0d78d3d7fe71e064ce50ca9a62435dad2752533107cf00f9f060 = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_475683fdbeda0d78d3d7fe71e064ce50ca9a62435dad2752533107cf00f9f060->enter($__internal_475683fdbeda0d78d3d7fe71e064ce50ca9a62435dad2752533107cf00f9f060_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "menu"));
// line 6
echo "<span class=\"label\">
<span class=\"icon\">";
// line 7
echo twig_include($this->env, $context, "@WebProfiler/Icon/router.svg");
echo "</span>
<strong>Routing</strong>
</span>
";
$__internal_475683fdbeda0d78d3d7fe71e064ce50ca9a62435dad2752533107cf00f9f060->leave($__internal_475683fdbeda0d78d3d7fe71e064ce50ca9a62435dad2752533107cf00f9f060_prof);
$__internal_72ed7e1b4749995ea5e7260ecc524f48453edb598dbde0a3016fbc2e5b926b3d->leave($__internal_72ed7e1b4749995ea5e7260ecc524f48453edb598dbde0a3016fbc2e5b926b3d_prof);
}
// line 12
// Renders the main panel by sub-requesting the _profiler_router controller.
public function block_panel($context, array $blocks = array())
{
$__internal_c19da141a630f3669f5fe7c94e5a968903f29bcb7cd994a4051ecf7ff1079825 = $this->env->getExtension("Symfony\\Bundle\\WebProfilerBundle\\Twig\\WebProfilerExtension");
$__internal_c19da141a630f3669f5fe7c94e5a968903f29bcb7cd994a4051ecf7ff1079825->enter($__internal_c19da141a630f3669f5fe7c94e5a968903f29bcb7cd994a4051ecf7ff1079825_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "panel"));
$__internal_312c0a27d2cccc06836145cd58525914aabe820d95097702356e71e0e516ea07 = $this->env->getExtension("Symfony\\Bridge\\Twig\\Extension\\ProfilerExtension");
$__internal_312c0a27d2cccc06836145cd58525914aabe820d95097702356e71e0e516ea07->enter($__internal_312c0a27d2cccc06836145cd58525914aabe820d95097702356e71e0e516ea07_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "panel"));
// line 13
echo "    ";
echo $this->env->getRuntime('Symfony\Bridge\Twig\Extension\HttpKernelRuntime')->renderFragment($this->env->getExtension('Symfony\Bridge\Twig\Extension\RoutingExtension')->getPath("_profiler_router", array("token" => (isset($context["token"]) ? $context["token"] : $this->getContext($context, "token")))));
echo "
";
$__internal_312c0a27d2cccc06836145cd58525914aabe820d95097702356e71e0e516ea07->leave($__internal_312c0a27d2cccc06836145cd58525914aabe820d95097702356e71e0e516ea07_prof);
$__internal_c19da141a630f3669f5fe7c94e5a968903f29bcb7cd994a4051ecf7ff1079825->leave($__internal_c19da141a630f3669f5fe7c94e5a968903f29bcb7cd994a4051ecf7ff1079825_prof);
}
public function getTemplateName()
{
return "WebProfilerBundle:Collector:router.html.twig";
}
public function isTraitable()
{
return false;
}
public function getDebugInfo()
{
// Maps compiled-PHP line numbers to template line numbers.
return array ( 94 => 13, 85 => 12, 71 => 7, 68 => 6, 59 => 5, 42 => 3, 11 => 1,);
}
/** @deprecated since 1.27 (to be removed in 2.0). Use getSourceContext() instead */
public function getSource()
{
@trigger_error('The '.__METHOD__.' method is deprecated since version 1.27 and will be removed in 2.0. Use getSourceContext() instead.', E_USER_DEPRECATED);
return $this->getSourceContext()->getCode();
}
public function getSourceContext()
{
return new Twig_Source("{% extends '@WebProfiler/Profiler/layout.html.twig' %}
{% block toolbar %}{% endblock %}
{% block menu %}
<span class=\"label\">
<span class=\"icon\">{{ include('@WebProfiler/Icon/router.svg') }}</span>
<strong>Routing</strong>
</span>
{% endblock %}
{% block panel %}
{{ render(path('_profiler_router', { token: token })) }}
{% endblock %}
", "WebProfilerBundle:Collector:router.html.twig", "/Applications/MAMP/htdocs/Symfony/vendor/symfony/symfony/src/Symfony/Bundle/WebProfilerBundle/Resources/views/Collector/router.html.twig");
}
}
| mehdiYal/Schoolium | var/cache/dev/twig/9d/9ddcdbb0671c46071f6efb8e5daeec192572165664b16d016f57f2b46b9702b3.php | PHP | mit | 8,083 |
// ***********************************************************
// This example plugins/index.js can be used to load plugins
//
// You can change the location of this file or turn off loading
// the plugins file with the 'pluginsFile' configuration option.
//
// You can read more here:
// https://on.cypress.io/plugins-guide
// ***********************************************************
// This function is called when a project is opened or re-opened (e.g. due to
// the project's config changing)
module.exports = (on, config) => {
// `on` is used to hook into various events Cypress emits
// `config` is the resolved Cypress config
if (process.env.CYPRESS_CONNECTION_TYPE) {
on(`before:browser:launch`, (browser = {}, args) => {
if (
browser.name === `chrome` &&
process.env.CYPRESS_CONNECTION_TYPE === `slow`
) {
args.push(`--force-effective-connection-type=2G`)
}
return args
})
}
}
| 0x80/gatsby | e2e-tests/production-runtime/cypress/plugins/index.js | JavaScript | mit | 957 |
<?php
// Admin entry point: gate access on the "manager" session value.
// Fixes: (1) the session was never started, so $_SESSION was always empty
// here when this page was hit directly; (2) reading an unset index raised
// a notice for fresh visitors; (3) the final redirect did not stop script
// execution.
if (session_status() !== PHP_SESSION_ACTIVE) {
    session_start();
}
// Unauthenticated (missing or empty manager): send to the login page.
// The loose `== ""` comparison matches the original check.
if (!isset($_SESSION['manager']) || $_SESSION['manager'] == "") {
    header("Location: admin_login.php");
    exit;
}
// Authenticated: forward to the real admin landing page.
header("Location: admin_index.php");
exit;
?>
using UnityEngine;
using UnityEditor;
using CreateThis.Factory.VR.UI.Button;
namespace MMVR.Factory.UI.Button {
    // Custom Unity inspector for FileManagerSaveButtonFactory: extends the
    // momentary-button inspector with a fileManager field and a Generate
    // button.
    [CustomEditor(typeof(FileManagerSaveButtonFactory))]
    [CanEditMultipleObjects]
    public class FileManagerSaveButtonFactoryEditor : MomentaryButtonFactoryEditor {
        SerializedProperty fileManager; // serialized "fileManager" property of the target factory

        protected override void OnEnable() {
            base.OnEnable();
            // Cache the property so AdditionalProperties() can draw it each repaint.
            fileManager = serializedObject.FindProperty("fileManager");
        }

        protected override void BuildGenerateButton() {
            if (GUILayout.Button("Generate")) {
                // Exact-type check: only generate for the concrete factory
                // class (skips subclasses during multi-object editing).
                if (target.GetType() == typeof(FileManagerSaveButtonFactory)) {
                    FileManagerSaveButtonFactory buttonFactory = (FileManagerSaveButtonFactory)target;
                    buttonFactory.Generate();
                }
            }
        }

        protected override void AdditionalProperties() {
            base.AdditionalProperties();
            // Draw the extra fileManager field beneath the inherited ones.
            EditorGUILayout.PropertyField(fileManager);
        }
    }
}
var box, mbox;
// Scene setup for the Oimo.js rigid-body example: camera, physics world,
// static ground, and one falling dynamic box mirrored by a render mesh.
function demo() {
    cam( 0, 20, 40 );

    world = new OIMO.World();
    // Static ground slab centred under the origin.
    world.add({ size:[50, 10, 50], pos:[0,-5,0] });

    // One shared descriptor drives both the physics body and its mesh.
    var bodyDef = {
        type:'box',
        size:[10, 10, 10],
        pos:[0,20,0],
        density:1,
        move:true
    };
    box = world.add( bodyDef );   // physics body
    mbox = view.add( bodyDef );   // three.js mesh
}
function update () {
world.step();
mbox.position.copy( box.getPosition() );
mbox.quaternion.copy( box.getQuaternion() );
} | lo-th/Oimo.js | examples/docs/rigidbody.js | JavaScript | mit | 504 |
# Migration creating the courses table.
class CreateCourses < ActiveRecord::Migration
  def change
    create_table :courses do |t|
      t.string :name
      t.string :short_name
      # Presumably an external student-information-system id -- confirm
      # against the code that imports courses.
      t.string :sisid
      t.text :description
      t.integer :department_id
      t.integer :term_id
      t.boolean :graded
      t.boolean :archived
      # NOTE(review): a column named "type" makes Rails treat Course as
      # single-table inheritance; verify STI is intended here.
      t.string :type
      t.timestamps
    end
  end
end
import traceback
class EnsureExceptionHandledGuard:
    """Ensure a Future's exception is logged if nobody ever retrieves it.

    A Future with an exception set should have that exception observed via
    result()/exception(); if it never is, the error would vanish silently.
    Attaching a ``__del__`` to the Future itself would block cycle
    collection, so instead the Future holds one of these small helper
    objects. When the Future is collected, the helper is collected too and
    its ``__del__`` reports the stored traceback through ``hndl``.

    The traceback is formatted lazily: the raw exception is kept until
    activate() runs (after the Future's callbacks have had a chance to
    consume the exception), at which point it is turned into a list of
    strings. Storing strings -- rather than the exception and its frames --
    keeps this object out of reference cycles involving the Future.
    clear() disarms the guard so nothing is reported.
    """

    __slots__ = ['exc', 'tb', 'hndl', 'cls']

    def __init__(self, exc, handler):
        # Keep the raw exception until activate() formats it.
        self.exc = exc
        self.hndl = handler
        self.cls = type(exc)
        self.tb = None

    def activate(self):
        """Format and store the traceback; drops the exception reference."""
        pending = self.exc
        if pending is None:
            return
        self.exc = None
        self.tb = traceback.format_exception(
            pending.__class__, pending, pending.__traceback__)

    def clear(self):
        """Disarm the guard: nothing will be reported on collection."""
        self.exc = self.tb = None

    def __del__(self):
        # Only report if activate() ran and clear() did not.
        if self.tb:
            self.hndl(self.cls, self.tb)
| mikhtonyuk/rxpython | concurrent/futures/cooperative/ensure_exception_handled.py | Python | mit | 3,261 |
module GitNetworkitis
  # Thin HTTParty wrapper for fetching GitHub API resources, with optional
  # transparent pagination ("batched" fetching) via Link response headers.
  class Getter
    include HTTParty
    include JSONHelper
    base_uri 'https://api.github.com'
    attr_accessor :url, :local_options, :query_options

    # Option keys consumed by Getter itself rather than sent to GitHub.
    LOCAL_KEYS = [:batch, :since]

    def initialize(url, options={})
      @url = url
      scrub_local_options options
      @query_options = options
    end

    # Dispatch to paginated or single-request fetching.
    def get
      if local_options[:batch]
        batched_get
      else
        single_get
      end
    end

    private

    # Pull Getter-specific keys out of the options hash. Asking for results
    # since a given time implies batched (paginated) fetching.
    def scrub_local_options(options={})
      @local_options = LOCAL_KEYS.each_with_object({}) do |key, opts|
        opts[key] = options.delete(key)
      end
      @local_options[:batch] = true unless @local_options[:since].nil?
    end

    def single_get(use_query_options=true)
      ret = if use_query_options
              Getter.get(url, query: query_options)
            else
              Getter.get(url)
            end
      raise "Unable to find Github Repository" unless ret.response.code == "200"
      ret
    end

    # Follow "next" links until exhausted, collecting every page.
    # Query options are only sent with the first request; the "next" URLs
    # already carry their own query string.
    def batched_get
      responses = []
      links = { next: url }
      first_batch = true
      until links[:next].nil?
        self.url = links[:next]
        response = single_get first_batch
        responses << response
        first_batch = false
        links = build_links_from_headers response.headers['link']
      end
      BatchResponse.new responses
    end

    # see the json files in spec/vcr_cassettes for examples of what the link headers look like
    def build_links_from_headers(headers)
      return {} if headers.nil?
      headers.split(',').each_with_object({}) do |link, rel|
        segments = link.strip.split(';')
        next_link = segments.first[1...-1] # strip the enclosing '<' '>' tags
        rel_command = segments.last.strip.match(/rel=\"(.*)\"/).captures.first.to_sym # e.g. "rel=\"next\"" #=> :next
        rel[rel_command] = next_link
      end
    end
  end
end
| jcoutu/gitnetworkitis | lib/gitnetworkitis/getter.rb | Ruby | mit | 1,826 |
// Renders each static page's Dust template into a plain HTML file in dist/.
var fs = require('fs'),
    cons = require('consolidate'),
    dust = require('dustjs-linkedin');

// Page names; each maps views/<name>.dust -> dist/<name>.html.
var pages = [
    'index',
    'contact',
    'faq',
    'registration',
    'sponsors',
    'travel',
    'visit',
    'volunteers'
];

pages.forEach(function(page) {
    var template = 'views/' + page + '.dust';
    // Render the template, then write the resulting HTML to dist/.
    cons.dust(template, { views: __dirname + '/views' }, function(err, html) {
        if (err) return console.log('error: ', err);
        var target = __dirname + '/dist/' + page + '.html';
        fs.writeFile(target, html, function(err) {
            if (err) return console.log('error saving file: ', page, err);
            console.log('create page: ', page);
        });
    });
});
| selfcontained/mini-cassia-turkey-trot | generate.js | JavaScript | mit | 563 |
__package__ = 'archivebox.core'
| pirate/bookmark-archiver | archivebox/core/__init__.py | Python | mit | 32 |
package com.team2502.robot2017.command.autonomous;
import edu.wpi.first.wpilibj.command.CommandGroup;
// Autonomous command group that performs a single vision-guided "follow".
public class ShinyFollow extends CommandGroup
{
    /**
     * Does a follow by running one AutoVisionCommand sequentially.
     * NOTE(review): the literals (200, 0.3) are presumably a duration/limit
     * and a speed -- confirm against AutoVisionCommand's constructor.
     */
    public ShinyFollow() { addSequential(new AutoVisionCommand(200, 0.3)); }
}
package br.com.caelum.rest.server;
import javax.servlet.http.HttpServletRequest;
/**
 * Immutable {@link Action} pairing a link relation ({@code rel}) with a URI.
 */
public class SimpleAction implements Action {
	private final String uri;
	private final String rel;
	/** Builds an action from an already-absolute URI. */
	public SimpleAction(String rel, String uri) {
		this.rel = rel;
		this.uri = uri;
	}
	/**
	 * Builds an action from a relative path. The host is currently
	 * hard-coded; the commented line shows how to derive it from the
	 * incoming request instead.
	 */
	public SimpleAction(String rel, HttpServletRequest request, String uri) {
		this.rel = rel;
		this.uri = "http://restful-server.appspot.com" + uri;
//		this.uri = "http://" + request.getServerName() + ":" + request.getServerPort() + request.getContextPath() + uri;
	}
	public String getUri() {
		return uri;
	}
	public String getRel() {
		return rel;
	}
}
| caelum/rest-client | rest-server-gae/src/br/com/caelum/rest/server/SimpleAction.java | Java | mit | 638 |
package com.ms.meizinewsapplication.features.meizi.model;
import android.content.Context;
import com.ms.meizinewsapplication.features.base.pojo.ImgItem;
import com.ms.retrofitlibrary.web.MyOkHttpClient;
import org.loader.model.OnModelListener;
import java.util.List;
import rx.Observable;
import rx.Subscription;
/**
 * Model loading the "breast" image list of the db group feed
 * (presumably the Douban group API -- confirm in DbGroupModel).
 * Created by 啟成 (Qicheng) on 2016/3/15.
 */
public class DbGroupBreastModel extends DbGroupModel {
    // Page offset forwarded to the web request built in reSubscription().
    private String pager_offset;
    /**
     * Starts a web load for the given page offset, reporting results to the
     * listener via the inherited loadWeb(Context, OnModelListener).
     *
     * @param context      Android context, used for cache control
     * @param listener     callback receiving the parsed ImgItem list
     * @param pager_offset page offset of the feed to request
     * @return the Rx subscription of the running request
     */
    public Subscription loadWeb(Context context, OnModelListener<List<ImgItem>> listener, String pager_offset) {
        this.pager_offset = pager_offset;
        return loadWeb(context, listener);
    }
    // Builds the actual observable request using the stored pager_offset.
    @Override
    protected Subscription reSubscription(Context context, OnModelListener<List<ImgItem>> listener) {
        Observable<String> dbGroupBreast = getDbGroup().RxDbGroupBreast(
                MyOkHttpClient.getCacheControl(context),
                pager_offset
        );
        return rxDbGroup(dbGroupBreast, listener);
    }
}
| qq137712630/MeiZiNews | app/src/main/java/com/ms/meizinewsapplication/features/meizi/model/DbGroupBreastModel.java | Java | mit | 1,004 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("12_RectangleProperties")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("12_RectangleProperties")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("efacbe98-13fb-4c4d-b368-04e2f314a249")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| nellypeneva/SoftUniProjects | 01_ProgrFundamentalsMay/11_Data-Types-Exercises/12_RectangleProperties/Properties/AssemblyInfo.cs | C# | mit | 1,420 |
import logging.handlers
import os
_pabotlog = logging.getLogger('PABot')
_pabotlog.setLevel(logging.DEBUG)
_logPath = os.path.abspath("./logging/pabot.log")
_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s')
_consoleStreamHandler = logging.StreamHandler()
_consoleStreamHandler.setLevel(logging.DEBUG)
_consoleStreamHandler.setFormatter(_formatter)
_symLogRotFileHandler = logging.handlers.RotatingFileHandler(_logPath, maxBytes=2000000, backupCount=5)
_symLogRotFileHandler.setLevel(logging.DEBUG)
_symLogRotFileHandler.setFormatter(_formatter)
_pabotlog.addHandler(_consoleStreamHandler)
_pabotlog.addHandler(_symLogRotFileHandler)
def LogPABotMessage(message):
_pabotlog.info(message)
def LogPABotError(message):
_pabotlog.error(message)
| KevinJMcGrath/Symphony-Ares | modules/plugins/PABot/logging.py | Python | mit | 796 |
<?php
namespace Phpmvc\Comment;
/**
 * Controller attaching a comment flow to a page or some content.
 *
 * Comments are persisted through \Phpmvc\Comment\CommentsInSession, i.e.
 * they live in the user's session rather than a database. All mutating
 * actions redirect back to the URL given in the 'redirect' POST field.
 */
class CommentController implements \Anax\DI\IInjectionAware
{
    use \Anax\DI\TInjectable;
    /**
     * View all comments for a page.
     *
     * @param string $page identifier of the page whose comments are listed
     *
     * @return void
     */
    public function viewAction($page)
    {
        $comments = new \Phpmvc\Comment\CommentsInSession();
        $comments->setDI($this->di);
        $all = $comments->findAll($page);
        $this->views->add('comment/comments', [
            'comments' => $all,
        ]);
    }
    /**
     * Add a comment built from POST data.
     *
     * @return void
     */
    public function addAction()
    {
        // Only proceed when the create form was actually submitted.
        $isPosted = $this->request->getPost('doCreate');
        if (!$isPosted) {
            $this->response->redirect($this->request->getPost('redirect'));
        }
        $comment = [
            'page' => $this->request->getPost('page'),
            'content' => $this->request->getPost('content'),
            'name' => $this->request->getPost('name'),
            'web' => $this->request->getPost('web'),
            'mail' => $this->request->getPost('mail'),
            'timestamp' => time(),
            'ip' => $this->request->getServer('REMOTE_ADDR'),
        ];
        $comments = new \Phpmvc\Comment\CommentsInSession();
        $comments->setDI($this->di);
        $comments->add($comment);
        $this->response->redirect($this->request->getPost('redirect'));
    }
    /**
     * Remove all comments.
     *
     * @return void
     */
    public function removeAllAction()
    {
        $isPosted = $this->request->getPost('doRemoveAll');
        if (!$isPosted) {
            $this->response->redirect($this->request->getPost('redirect'));
        }
        $comments = new \Phpmvc\Comment\CommentsInSession();
        $comments->setDI($this->di);
        $comments->deleteAll();
        $this->response->redirect($this->request->getPost('redirect'));
    }
    /**
     * Remove a single comment.
     *
     * NOTE(review): unlike the other mutating actions there is no check of
     * a 'doRemove' POST flag (see the commented-out lines below), so this
     * deletes on any request -- confirm this is intended.
     *
     * @param mixed $id id of the comment to delete
     *
     * @return void
     */
    public function removeAction($id)
    {
        // $isPosted = $this->request->getPost('doRemove'); //doRemove must be added to the form in the template.
        // if (!$isPosted) {
        // $this->response->redirect($this->request->getPost('redirect'));
        // }
        $comments = new \Phpmvc\Comment\CommentsInSession();
        $comments->setDI($this->di);
        $comments->delete($id);
        $this->response->redirect($this->request->getPost('redirect'));
    }
    /**
     * Show the edit form for a comment.
     *
     * NOTE(review): findAll() is called without the $page argument used in
     * viewAction() -- verify CommentsInSession::findAll() supports a
     * no-argument call and that scanning all pages is intended.
     *
     * @param mixed $id id of the comment to edit
     *
     * @return void
     */
    public function editFormAction($id)
    {
        $comments = new \Phpmvc\Comment\CommentsInSession();
        $comments->setDI($this->di);
        $all = $comments->findAll();
        // Linear scan for the index of the comment with the requested id.
        $i = 0;
        foreach($all as $comment){
            if($comment['id'] == $id){
                break;
            }
            $i++;
        }
        $this->views->add('comment/editComment', [
            'comment' => $all[$i],
        ]);
    }
    /**
     * Save an edited comment from POST data.
     *
     * @param mixed $id id of the comment being updated
     *
     * @return void
     */
    public function editAction($id)
    {
        $isPosted = $this->request->getPost('doEdit');
        if (!$isPosted) {
            $this->response->redirect($this->request->getPost('redirect'));
        }
        $comment = [
            'page' => $this->request->getPost('page'),
            'content' => $this->request->getPost('content'),
            'name' => $this->request->getPost('name'),
            'web' => $this->request->getPost('web'),
            'mail' => $this->request->getPost('mail'),
            'timestamp' => $this->request->getPost('timestamp'),
            'ip' => $this->request->getServer('REMOTE_ADDR'),
            'id' => $id,
            'edited' => time(),
        ];
        $comments = new \Phpmvc\Comment\CommentsInSession();
        $comments->setDI($this->di);
        $comments->edit($comment, $id);
        $this->response->redirect($this->request->getPost('redirect'));
    }
}
| frjf14/Projekt | vendor/phpmvc/comment/src/Comment/CommentController.php | PHP | mit | 3,878 |
"""Entry point for the heisenberg.plot subprogram: parses plot-specific
command-line options, validates the initial conditions, and launches
heisenberg.plot.plot()."""
import ast
import heisenberg.library.heisenberg_dynamics_context
import heisenberg.library.orbit_plot
import heisenberg.option_parser
import heisenberg.plot
import heisenberg.util
import matplotlib
import numpy as np
import sys
# https://github.com/matplotlib/matplotlib/issues/5907 says this should fix "Exceeded cell block limit" problems
matplotlib.rcParams['agg.path.chunksize'] = 10000
dynamics_context = heisenberg.library.heisenberg_dynamics_context.Numeric()
op = heisenberg.option_parser.OptionParser(module=heisenberg.plot)
# Add the subprogram-specific options here.
op.add_option(
    '--initial-preimage',
    dest='initial_preimage',
    type='string',
    help='Specifies the preimage of the initial conditions with respect to the embedding map specified by the --embedding-dimension and --embedding-solution-sheet-index option values. Should have the form [x_1,...,x_n], where n is the embedding dimension and x_i is a floating point literal for each i.'
)
op.add_option(
    '--initial',
    dest='initial',
    type='string',
    help='Specifies the initial conditions [x,y,z,p_x,p_y,p_z], where each of x,y,z,p_x,p_y,p_z are floating point literals.'
)
op.add_option(
    '--optimization-iterations',
    dest='optimization_iterations',
    default=1000,
    type='int',
    help='Specifies the number of iterations to run the optimization for (if applicable). Default is 1000.'
)
op.add_option(
    '--optimize-initial',
    dest='optimize_initial',
    action='store_true',
    default=False,
    help='Indicates that the specified initial condition (via whichever of the --initial... options) should be used as the starting point for an optimization to attempt to close the orbit. Default value is False.'
)
op.add_option(
    '--output-dir',
    dest='output_dir',
    default='.',
    help='Specifies the directory to write plot images and data files to. Default is current directory.'
)
op.add_option(
    '--disable-plot-initial',
    dest='disable_plot_initial',
    action='store_true',
    default=False,
    help='Disables plotting the initial curve; only has effect if --optimize-initial is specified.'
)
# options is None when common-option validation failed.
options,args = op.parse_argv_and_validate()
if options is None:
    sys.exit(-1)
# Exactly one way of specifying the initial conditions must be used.
num_initial_conditions_specified = sum([
    options.initial_preimage is not None,
    options.initial is not None,
])
if num_initial_conditions_specified != 1:
    print('Some initial condition option must be specified; --initial-preimage, --initial. However, {0} of those were specified.'.format(num_initial_conditions_specified))
    op.print_help()
    sys.exit(-1)
# Validate subprogram-specific options here.
# Attempt to parse initial conditions. Upon success, the attribute options.qp_0 should exist.
if options.initial_preimage is not None:
    try:
        options.initial_preimage = np.array(ast.literal_eval(options.initial_preimage))
        expected_shape = (options.embedding_dimension,)
        if options.initial_preimage.shape != expected_shape:
            raise ValueError('--initial-preimage value had the wrong number of components (got {0} but expected {1}).'.format(options.initial_preimage.shape, expected_shape))
        options.qp_0 = dynamics_context.embedding(N=options.embedding_dimension, sheet_index=options.embedding_solution_sheet_index)(options.initial_preimage)
    except Exception as e:
        print('error parsing --initial-preimage value; error was {0}'.format(e))
        op.print_help()
        sys.exit(-1)
elif options.initial is not None:
    try:
        options.initial = heisenberg.util.csv_as_ndarray(heisenberg.util.pop_brackets_off_of(options.initial), float)
        expected_shape = (6,)
        if options.initial.shape != expected_shape:
            raise ValueError('--initial value had the wrong number of components (got {0} but expected {1}).'.format(options.initial.shape, expected_shape))
        options.qp_0 = options.initial.reshape(2,3)
    except ValueError as e:
        print('error parsing --initial value: {0}'.format(str(e)))
        op.print_help()
        sys.exit(-1)
else:
    assert False, 'this should never happen because of the check with num_initial_conditions_specified'
# Deterministic RNG seeded from the command line (options.seed is defined
# by the shared option parser -- confirm in heisenberg.option_parser).
rng = np.random.RandomState(options.seed)
heisenberg.plot.plot(dynamics_context, options, rng=rng)
| vdods/heisenberg | heisenberg/plot/__main__.py | Python | mit | 4,283 |
def send_simple_message():
    """Send a test email through the Mailgun sandbox domain.

    Returns the ``requests.Response`` from Mailgun's messages endpoint.

    SECURITY(review): the API key and recipient address are hard-coded
    below; move them into configuration/environment variables before any
    real use.
    """
    # Local import: this module never imported requests, so calling the
    # original function raised NameError at runtime.
    import requests

    return requests.post(
        "https://api.mailgun.net/v3/sandbox049ff464a4d54974bb0143935f9577ef.mailgun.org/messages",
        auth=("api", "key-679dc79b890e700f11f001a6bf86f4a1"),
        data={"from": "Mailgun Sandbox <postmaster@sandbox049ff464a4d54974bb0143935f9577ef.mailgun.org>",
              "to": "nick <nicorellius@gmail.com>",
              "subject": "Hello nick",
              "text": "Congratulations nick, you just sent an email with Mailgun! You are truly awesome! You can see a record of this email in your logs: https://mailgun.com/cp/log . You can send up to 300 emails/day from this sandbox server. Next, you should add your own domain so you can send 10,000 emails/month for free."})


# cURL equivalent for sending mail with the API key:
# curl -s --user 'api:key-679dc79b890e700f11f001a6bf86f4a1' \
#     https://api.mailgun.net/v3/mail.pdxpixel.com/messages \
#     -F from='Excited User <mailgun@pdxpixel.com>' \
#     -F to=nick@pdxpixel.com \
#     -F subject='Hello' \
#     -F text='Testing some Mailgun awesomness!'
| nicorellius/pdxpixel | pdxpixel/core/mailgun.py | Python | mit | 1,073 |
<?php
namespace Ooxif\LaravelSpecSchema\SqlServer;
trait BlueprintTrait
{
} | ooxif/laravel-spec-schema | src/Ooxif/LaravelSpecSchema/SqlServer/BlueprintTrait.php | PHP | mit | 77 |
def load_keys(filepath):
    """
    Loads the Twitter API keys into a dict.

    Each non-blank line of the config file must look like ``key = value``;
    whitespace around key and value is stripped. Only the first ``=`` is
    significant, so values may themselves contain ``=``.

    :param filepath: file path to config file with Twitter API keys.
    :return: keys_dict
    :raise: IOError
    """
    try:
        # Context manager guarantees the handle is closed (the original
        # leaked it). Text mode: the contents are parsed as strings.
        with open(filepath, 'r') as keys_file:
            keys = {}
            for line in keys_file:
                if not line.strip():
                    continue  # tolerate blank lines
                # split on the first '=' only, so values may contain '='
                key, value = line.split('=', 1)
                keys[key.strip()] = value.strip()
    except IOError:
        message = ('File {} cannot be opened.'
                   ' Check that it exists and is binary.')
        print(message.format(filepath))
        raise
    except Exception:
        print("Error opening or unpickling file.")
        raise
    return keys
| nhatbui/LebronCoin | lebroncoin/key_loader.py | Python | mit | 654 |
"""Main entry points for scripts."""
from __future__ import print_function, division
from argparse import ArgumentParser
from collections import OrderedDict
from copy import copy
from datetime import datetime
import glob
import json
import logging
import math
import os
import scipy.stats
import numpy as np
from .version import __version__
from .psffuncs import gaussian_moffat_psf
from .psf import TabularPSF, GaussianMoffatPSF
from .io import read_datacube, write_results, read_results
from .fitting import (guess_sky, fit_galaxy_single, fit_galaxy_sky_multi,
fit_position_sky, fit_position_sky_sn_multi,
RegularizationPenalty)
from .utils import yxbounds
from .extern import ADR, Hyper_PSF3D_PL
__all__ = ["cubefit", "cubefit_subtract", "cubefit_plot"]
MODEL_SHAPE = (32, 32)
SPAXEL_SIZE = 0.43
MIN_NMAD = 2.5 # Minimum Number of Median Absolute Deviations above
# the minimum spaxel value in fit_position
LBFGSB_FACTOR = 1e10
REFWAVE = 5000. # reference wavelength in Angstroms for PSF params and ADR
POSITION_BOUND = 3. # Bound on fitted positions relative in initial positions
def snfpsf(wave, psfparams, header, psftype):
    """Create a 3-d PSF based on SNFactory-specific parameterization of
    Gaussian + Moffat PSF parameters and ADR.

    Parameters
    ----------
    wave : ndarray
        Wavelengths in Angstroms (relative to REFWAVE below).
    psfparams : sequence of 4 floats
        Ellipticity followed by the quadratic coefficients of alpha in
        relative wavelength.
    header : dict-like
        FITS-like header; 'AIRMASS', 'PARANG' and 'CHANNEL' feed the ADR
        parameter prediction, 'PRESSURE'/'TEMP' (sanity-bounded) the
        refraction model.
    psftype : str
        'gaussian-moffat' or 'tabular' (the latter tabulates the same
        Gaussian+Moffat profile).
    """
    # Get Gaussian+Moffat parameters at each wavelength.
    relwave = wave / REFWAVE - 1.0
    ellipticity = abs(psfparams[0]) * np.ones_like(wave)
    alpha = np.abs(psfparams[1] +
                   psfparams[2] * relwave +
                   psfparams[3] * relwave**2)
    # correlated parameters (coefficients determined externally)
    sigma = 0.545 + 0.215 * alpha # Gaussian parameter
    beta = 1.685 + 0.345 * alpha # Moffat parameter
    eta = 1.040 + 0.0 * alpha # gaussian ampl. / moffat ampl.
    # Atmospheric differential refraction (ADR): Because of ADR,
    # the center of the PSF will be different at each wavelength,
    # by an amount that we can determine (pretty well) from the
    # atmospheric conditions and the pointing and angle of the
    # instrument. We calculate the offsets here as a function of
    # observation and wavelength and input these to the model.
    # Correction to parallactic angle and airmass for 2nd-order effects
    # such as MLA rotation, mechanical flexures or finite-exposure
    # corrections. These values have been trained on faint-std star
    # exposures.
    #
    # `predict_adr_params` uses 'AIRMASS', 'PARANG' and 'CHANNEL' keys
    # in input dictionary.
    delta, theta = Hyper_PSF3D_PL.predict_adr_params(header)
    # check for crazy values of pressure and temperature, and assign default
    # values.
    pressure = header.get('PRESSURE', 617.)
    if not 550. < pressure < 650.:
        pressure = 617.
    temp = header.get('TEMP', 2.)
    if not -20. < temp < 20.:
        temp = 2.
    adr = ADR(pressure, temp, lref=REFWAVE, delta=delta, theta=theta)
    adr_refract = adr.refract(0, 0, wave, unit=SPAXEL_SIZE)
    # adr_refract[0, :] corresponds to x, adr_refract[1, :] => y
    xctr, yctr = adr_refract
    if psftype == 'gaussian-moffat':
        return GaussianMoffatPSF(sigma, alpha, beta, ellipticity, eta,
                                 yctr, xctr, MODEL_SHAPE, subpix=3)
    elif psftype == 'tabular':
        A = gaussian_moffat_psf(sigma, alpha, beta, ellipticity, eta,
                                yctr, xctr, MODEL_SHAPE, subpix=3)
        return TabularPSF(A)
    else:
        raise ValueError("unknown psf type: " + repr(psftype))
def setup_logging(loglevel, logfname=None):
    """Configure the root logger.

    `loglevel` may be a numeric logging level or a level name such as
    "debug"; an unrecognized name prints an error and terminates the
    process. When `logfname` is given, any pre-existing file at that path
    is removed so the log starts fresh; otherwise output goes to stdout.
    """
    level = loglevel
    # Accept level names ("debug", "info", ...) in addition to ints.
    if not isinstance(level, int):
        level = getattr(logging, level.upper(), None)
    if not isinstance(level, int):
        print('Invalid log level: %s' % level)
        exit(1)

    # Start each run with a fresh log file.
    if logfname is not None and os.path.exists(logfname):
        os.remove(logfname)

    logging.basicConfig(filename=logfname,
                        format="%(levelname)s %(message)s",
                        level=level)
def cubefit(argv=None):
    """Command-line entry point for fitting the SN + galaxy model.

    Parses arguments (from ``argv`` or ``sys.argv``), reads the JSON
    configuration and data cubes, then runs the multi-step fit: galaxy
    model on the master reference, positions of the other references,
    galaxy on all references, and finally data/SN positions on the
    non-reference epochs (optionally iterating once more with
    ``--refitgal``). Results are written to ``outfile``.

    Returns 0 on success.
    """
    DESCRIPTION = "Fit SN + galaxy model to SNFactory data cubes."

    parser = ArgumentParser(prog="cubefit", description=DESCRIPTION)
    parser.add_argument("configfile",
                        help="configuration file name (JSON format)")
    parser.add_argument("outfile", help="Output file name (FITS format)")
    parser.add_argument("--dataprefix", default="",
                        help="path prepended to data file names; default is "
                        "empty string")
    parser.add_argument("--logfile", help="Write log to this file "
                        "(default: print to stdout)", default=None)
    parser.add_argument("--loglevel", default="info",
                        help="one of: debug, info, warning (default is info)")
    parser.add_argument("--diagdir", default=None,
                        help="If given, write intermediate diagnostic results "
                        "to this directory")
    parser.add_argument("--refitgal", default=False, action="store_true",
                        help="Add an iteration where galaxy model is fit "
                        "using all epochs and then data/SN positions are "
                        "refit")
    parser.add_argument("--mu_wave", default=0.07, type=float,
                        help="Wavelength regularization parameter. "
                        "Default is 0.07.")
    parser.add_argument("--mu_xy", default=0.001, type=float,
                        help="Spatial regularization parameter. "
                        "Default is 0.001.")
    parser.add_argument("--psftype", default="gaussian-moffat",
                        help="Type of PSF: 'gaussian-moffat' or 'tabular'. "
                        "Currently, tabular means generate a tabular PSF from "
                        "gaussian-moffat parameters.")
    args = parser.parse_args(argv)

    setup_logging(args.loglevel, logfname=args.logfile)

    # record start time
    tstart = datetime.now()
    logging.info("cubefit v%s started at %s", __version__,
                 tstart.strftime("%Y-%m-%d %H:%M:%S"))
    tsteps = OrderedDict()  # finish time of each step.

    logging.info("parameters: mu_wave={:.3g} mu_xy={:.3g} refitgal={}"
                 .format(args.mu_wave, args.mu_xy, args.refitgal))
    logging.info("            psftype={}".format(args.psftype))

    logging.info("reading config file")
    with open(args.configfile) as f:
        cfg = json.load(f)

    # basic checks on config contents.
    assert (len(cfg["filenames"]) == len(cfg["xcenters"]) ==
            len(cfg["ycenters"]) == len(cfg["psf_params"]))

    # -------------------------------------------------------------------------
    # Load data cubes from the list of FITS files.

    nt = len(cfg["filenames"])
    logging.info("reading %d data cubes", nt)
    cubes = []
    for fname in cfg["filenames"]:
        logging.debug("  reading %s", fname)
        cubes.append(read_datacube(os.path.join(args.dataprefix, fname)))
    wave = cubes[0].wave
    nw = len(wave)

    # assign some local variables for convenience
    refs = cfg["refs"]
    master_ref = cfg["master_ref"]
    if master_ref not in refs:
        raise ValueError("master ref choice must be one of the final refs (" +
                         " ".join(refs.astype(str)) + ")")
    nonmaster_refs = [i for i in refs if i != master_ref]
    nonrefs = [i for i in range(nt) if i not in refs]

    # Ensure that all cubes have the same wavelengths.
    if not all(np.all(cubes[i].wave == wave) for i in range(1, nt)):
        raise ValueError("all data must have same wavelengths")

    # -------------------------------------------------------------------------
    # PSF for each observation

    logging.info("setting up PSF for all %d epochs", nt)
    psfs = [snfpsf(wave, cfg["psf_params"][i], cubes[i].header, args.psftype)
            for i in range(nt)]

    # -------------------------------------------------------------------------
    # Initialize all model parameters to be fit

    yctr0 = np.array(cfg["ycenters"])
    xctr0 = np.array(cfg["xcenters"])

    galaxy = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)
    sn = np.zeros((nt, nw), dtype=np.float64)  # SN spectrum at each epoch
    skys = np.zeros((nt, nw), dtype=np.float64)  # Sky spectrum at each epoch
    yctr = yctr0.copy()
    xctr = xctr0.copy()
    snctr = (0., 0.)

    # For writing out to FITS
    modelwcs = {"CRVAL1": -SPAXEL_SIZE * (MODEL_SHAPE[0] - 1) / 2.,
                "CRPIX1": 1,
                "CDELT1": SPAXEL_SIZE,
                "CRVAL2": -SPAXEL_SIZE * (MODEL_SHAPE[1] - 1) / 2.,
                "CRPIX2": 1,
                "CDELT2": SPAXEL_SIZE,
                "CRVAL3": cubes[0].header["CRVAL3"],
                "CRPIX3": cubes[0].header["CRPIX3"],
                "CDELT3": cubes[0].header["CDELT3"]}

    # -------------------------------------------------------------------------
    # Position bounds

    # Bounds on data position: shape=(nt, 2)
    xctrbounds = np.vstack((xctr - POSITION_BOUND, xctr + POSITION_BOUND)).T
    yctrbounds = np.vstack((yctr - POSITION_BOUND, yctr + POSITION_BOUND)).T
    snctrbounds = (-POSITION_BOUND, POSITION_BOUND)

    # For data positions, check that bounds do not extend
    # past the edge of the model and adjust the minbound and maxbound.
    # This doesn't apply to SN position.
    gshape = galaxy.shape[1:3]  # model shape
    for i in range(nt):
        dshape = cubes[i].data.shape[1:3]
        (yminabs, ymaxabs), (xminabs, xmaxabs) = yxbounds(gshape, dshape)
        yctrbounds[i, 0] = max(yctrbounds[i, 0], yminabs)
        yctrbounds[i, 1] = min(yctrbounds[i, 1], ymaxabs)
        xctrbounds[i, 0] = max(xctrbounds[i, 0], xminabs)
        xctrbounds[i, 1] = min(xctrbounds[i, 1], xmaxabs)

    # -------------------------------------------------------------------------
    # Guess sky

    logging.info("guessing sky for all %d epochs", nt)
    for i, cube in enumerate(cubes):
        skys[i, :] = guess_sky(cube, npix=30)

    # -------------------------------------------------------------------------
    # Regularization penalty parameters

    # Calculate rough average galaxy spectrum from all final refs.
    spectra = np.zeros((len(refs), len(wave)), dtype=np.float64)
    for j, i in enumerate(refs):
        avg_spec = np.average(cubes[i].data, axis=(1, 2)) - skys[i]
        # integer bin count: len(wave)/10 is a float under "true division"
        # (see `from __future__ import division`), which binned_statistic
        # rejects.
        mean_spec, bins, bn = scipy.stats.binned_statistic(wave, avg_spec,
                                                           bins=len(wave) // 10)
        spectra[j] = np.interp(wave, bins[:-1] + np.diff(bins)[0]/2.,
                               mean_spec)
    mean_gal_spec = np.average(spectra, axis=0)
    # Ensure that there won't be any negative or tiny values in mean:
    mean_floor = 0.1 * np.median(mean_gal_spec)
    mean_gal_spec[mean_gal_spec < mean_floor] = mean_floor

    galprior = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64)

    regpenalty = RegularizationPenalty(galprior, mean_gal_spec, args.mu_xy,
                                       args.mu_wave)

    tsteps["setup"] = datetime.now()

    # -------------------------------------------------------------------------
    # Fit just the galaxy model to just the master ref.

    data = cubes[master_ref].data - skys[master_ref, :, None, None]
    weight = cubes[master_ref].weight

    logging.info("fitting galaxy to master ref [%d]", master_ref)
    galaxy = fit_galaxy_single(galaxy, data, weight,
                               (yctr[master_ref], xctr[master_ref]),
                               psfs[master_ref], regpenalty, LBFGSB_FACTOR)

    if args.diagdir:
        fname = os.path.join(args.diagdir, 'step1.fits')
        write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
                      yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)

    tsteps["fit galaxy to master ref"] = datetime.now()

    # -------------------------------------------------------------------------
    # Fit the positions of the other final refs
    #
    # Here we only use spaxels where the *model* has significant flux.
    # We define "significant" as some number of median absolute deviations
    # (MAD) above the minimum flux in the model. We (temporarily) set the
    # weight of "insignificant" spaxels to zero during this process, then
    # restore the original weight after we're done.
    #
    # If there are less than 20 "significant" spaxels, we do not attempt to
    # fit the position, but simply leave it as is.

    logging.info("fitting position of non-master refs %s", nonmaster_refs)
    for i in nonmaster_refs:
        cube = cubes[i]

        # Evaluate galaxy on this epoch for purpose of masking spaxels.
        gal = psfs[i].evaluate_galaxy(galaxy, (cube.ny, cube.nx),
                                      (yctr[i], xctr[i]))

        # Set weight of low-valued spaxels to zero.
        gal2d = gal.sum(axis=0)  # Sum of gal over wavelengths
        mad = np.median(np.abs(gal2d - np.median(gal2d)))
        mask = gal2d > np.min(gal2d) + MIN_NMAD * mad
        if mask.sum() < 20:
            continue

        weight = cube.weight * mask[None, :, :]

        fctr, fsky = fit_position_sky(galaxy, cube.data, weight,
                                      (yctr[i], xctr[i]), psfs[i],
                                      (yctrbounds[i], xctrbounds[i]))
        yctr[i], xctr[i] = fctr
        skys[i, :] = fsky

    tsteps["fit positions of other refs"] = datetime.now()

    # -------------------------------------------------------------------------
    # Redo model fit, this time including all final refs.

    datas = [cubes[i].data for i in refs]
    weights = [cubes[i].weight for i in refs]
    ctrs = [(yctr[i], xctr[i]) for i in refs]
    psfs_refs = [psfs[i] for i in refs]
    logging.info("fitting galaxy to all refs %s", refs)
    galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,
                                         psfs_refs, regpenalty, LBFGSB_FACTOR)

    # put fitted skys back in `skys`
    for i,j in enumerate(refs):
        skys[j, :] = fskys[i]

    if args.diagdir:
        fname = os.path.join(args.diagdir, 'step2.fits')
        write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
                      yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)

    tsteps["fit galaxy to all refs"] = datetime.now()

    # -------------------------------------------------------------------------
    # Fit position of data and SN in non-references
    #
    # Now we think we have a good galaxy model. We fix this and fit
    # the relative position of the remaining epochs (which presumably
    # all have some SN light). We simultaneously fit the position of
    # the SN itself.

    logging.info("fitting position of all %d non-refs and SN position",
                 len(nonrefs))
    if len(nonrefs) > 0:
        datas = [cubes[i].data for i in nonrefs]
        weights = [cubes[i].weight for i in nonrefs]
        psfs_nonrefs = [psfs[i] for i in nonrefs]
        fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(
            galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],
            snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],
            xctrbounds[nonrefs], snctrbounds)

        # put fitted results back in parameter lists.
        yctr[nonrefs] = fyctr
        xctr[nonrefs] = fxctr
        for i,j in enumerate(nonrefs):
            skys[j, :] = fskys[i]
            sn[j, :] = fsne[i]

    tsteps["fit positions of nonrefs & SN"] = datetime.now()

    # -------------------------------------------------------------------------
    # optional step(s)

    if args.refitgal and len(nonrefs) > 0:

        if args.diagdir:
            fname = os.path.join(args.diagdir, 'step3.fits')
            write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
                          yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)

        # ---------------------------------------------------------------------
        # Redo fit of galaxy, using ALL epochs, including ones with SN
        # light.  We hold the SN "fixed" simply by subtracting it from the
        # data and fitting the remainder.
        #
        # This is slightly dangerous: any errors in the original SN
        # determination, whether due to an incorrect PSF or ADR model
        # or errors in the galaxy model will result in residuals. The
        # galaxy model will then try to compensate for these.
        #
        # We should look at the galaxy model at the position of the SN
        # before and after this step to see if there is a bias towards
        # the galaxy flux increasing.

        logging.info("fitting galaxy using all %d epochs", nt)
        datas = [cube.data for cube in cubes]
        weights = [cube.weight for cube in cubes]
        ctrs = [(yctr[i], xctr[i]) for i in range(nt)]

        # subtract SN from non-ref cubes.
        for i in nonrefs:
            s = psfs[i].point_source(snctr, datas[i].shape[1:3], ctrs[i])
            # do *not* use in-place operation (-=) here!
            datas[i] = cubes[i].data - sn[i, :, None, None] * s

        galaxy, fskys = fit_galaxy_sky_multi(galaxy, datas, weights, ctrs,
                                             psfs, regpenalty, LBFGSB_FACTOR)
        for i in range(nt):
            skys[i, :] = fskys[i]  # put fitted skys back in skys

        if args.diagdir:
            fname = os.path.join(args.diagdir, 'step4.fits')
            write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
                          yctrbounds, xctrbounds, cubes, psfs, modelwcs, fname)

        # ---------------------------------------------------------------------
        # Repeat step before last: fit position of data and SN in
        # non-references

        logging.info("re-fitting position of all %d non-refs and SN position",
                     len(nonrefs))
        if len(nonrefs) > 0:
            datas = [cubes[i].data for i in nonrefs]
            weights = [cubes[i].weight for i in nonrefs]
            psfs_nonrefs = [psfs[i] for i in nonrefs]
            fyctr, fxctr, snctr, fskys, fsne = fit_position_sky_sn_multi(
                galaxy, datas, weights, yctr[nonrefs], xctr[nonrefs],
                snctr, psfs_nonrefs, LBFGSB_FACTOR, yctrbounds[nonrefs],
                xctrbounds[nonrefs], snctrbounds)

            # put fitted results back in parameter lists.
            yctr[nonrefs] = fyctr
            xctr[nonrefs] = fxctr
            for i, j in enumerate(nonrefs):
                skys[j, :] = fskys[i]
                sn[j, :] = fsne[i]

    # -------------------------------------------------------------------------
    # Write results

    logging.info("writing results to %s", args.outfile)
    write_results(galaxy, skys, sn, snctr, yctr, xctr, yctr0, xctr0,
                  yctrbounds, xctrbounds, cubes, psfs, modelwcs, args.outfile)

    # time info
    logging.info("step times:")
    maxlen = max(len(key) for key in tsteps)
    fmtstr = "        %2dm%02ds - %-" + str(maxlen) + "s"
    tprev = tstart
    for key, tstep in tsteps.items():
        t = (tstep - tprev).seconds
        logging.info(fmtstr, t//60, t%60, key)
        tprev = tstep

    tfinish = datetime.now()
    logging.info("finished at %s", tfinish.strftime("%Y-%m-%d %H:%M:%S"))
    t = (tfinish - tstart).seconds
    logging.info("took %3dm%2ds", t // 60, t % 60)

    return 0
def cubefit_subtract(argv=None):
    # Command-line entry point (`cubefit-subtract`): remove the fitted galaxy
    # model from each input data cube and write per-epoch SN spectra.
    # Returns 0 on success (process exit status).
    DESCRIPTION = \
"""Subtract model determined by cubefit from the original data.

The "outnames" key in the supplied configuration file is used to
determine the output FITS file names. The input FITS header is passed
unaltered to the output file, with the following additions:
(1) A `HISTORY` entry. (2) `CBFT_SNX` and `CBFT_SNY` records giving
the cubefit-determined position of the SN relative to the center of
the data array (at the reference wavelength).

This script also writes fitted SN spectra to individual FITS files.
The "sn_outnames" configuration field determines the output filenames.
"""

    # Local imports: only needed by this entry point.
    import shutil
    import fitsio

    prog_name = "cubefit-subtract"
    prog_name_ver = "{} v{}".format(prog_name, __version__)
    parser = ArgumentParser(prog=prog_name, description=DESCRIPTION)
    parser.add_argument("configfile", help="configuration file name "
                        "(JSON format), same as cubefit input.")
    parser.add_argument("resultfile", help="Result FITS file from cubefit")
    parser.add_argument("--dataprefix", default="",
                        help="path prepended to data file names; default is "
                        "empty string")
    parser.add_argument("--outprefix", default="",
                        help="path prepended to output file names; default is "
                        "empty string")
    args = parser.parse_args(argv)

    setup_logging("info")

    # get input & output filenames
    with open(args.configfile) as f:
        cfg = json.load(f)
    fnames = [os.path.join(args.dataprefix, fname)
              for fname in cfg["filenames"]]
    outfnames = [os.path.join(args.outprefix, fname)
                 for fname in cfg["outnames"]]

    # load results
    results = read_results(args.resultfile)
    epochs = results["epochs"]
    sny, snx = results["snctr"]
    if not len(epochs) == len(fnames) == len(outfnames):
        raise RuntimeError("number of epochs in result file not equal to "
                           "number of input and output files in config file")

    # subtract and write out.
    # Each output starts as a byte copy of the input so the original header
    # is preserved; only the primary HDU's data is rewritten.
    for fname, outfname, epoch in zip(fnames, outfnames, epochs):
        logging.info("writing %s", outfname)
        shutil.copy(fname, outfname)
        f = fitsio.FITS(outfname, "rw")
        data = f[0].read()
        data -= epoch["galeval"]  # galaxy model evaluated on the data grid
        f[0].write(data)
        f[0].write_history("galaxy subtracted by " + prog_name_ver)
        # SN position relative to the array center at REFWAVE, in spaxels.
        f[0].write_key("CBFT_SNX", snx - epoch['xctr'],
                       comment="SN x offset from center at {:.0f} A [spaxels]"
                       .format(REFWAVE))
        f[0].write_key("CBFT_SNY", sny - epoch['yctr'],
                       comment="SN y offset from center at {:.0f} A [spaxels]"
                       .format(REFWAVE))
        f.close()

    # output SN spectra to separate files.
    sn_outnames = [os.path.join(args.outprefix, fname)
                   for fname in cfg["sn_outnames"]]
    # 1-d wavelength WCS, copied from the cube's 3rd axis keywords.
    header = {"CRVAL1": results["header"]["CRVAL3"],
              "CRPIX1": results["header"]["CRPIX3"],
              "CDELT1": results["header"]["CDELT3"]}
    for outfname, epoch in zip(sn_outnames, epochs):
        logging.info("writing %s", outfname)
        if os.path.exists(outfname):  # avoid warning from clobber=True
            os.remove(outfname)
        with fitsio.FITS(outfname, "rw") as f:
            f.write(epoch["sn"], extname="sn", header=header)
            f[0].write_history("created by " + prog_name_ver)

    return 0
def cubefit_plot(argv=None):
    # Command-line entry point (`cubefit-plot`): render diagnostic figures
    # from a cubefit result file (and optional per-step diagnostics).
    # Returns 0 on success (process exit status).
    DESCRIPTION = """Plot results and diagnostics from cubefit"""

    # Deferred import: plotting pulls in matplotlib, only needed here.
    from .plotting import plot_timeseries, plot_epoch, plot_sn, plot_adr

    # arguments are the same as cubefit except an output
    parser = ArgumentParser(prog="cubefit-plot", description=DESCRIPTION)
    parser.add_argument("configfile", help="configuration filename")
    parser.add_argument("resultfile", help="Result filename from cubefit")
    parser.add_argument("outprefix", help="output prefix")
    parser.add_argument("--dataprefix", default="",
                        help="path prepended to data file names; default is "
                        "empty string")
    parser.add_argument('-b', '--band', help='timeseries band (U, B, V). '
                        'Default is a 1000 A wide band in middle of cube.',
                        default=None, dest='band')
    parser.add_argument('--idrfiles', nargs='+', default=None,
                        help='Prefix of IDR. If given, the cubefit SN '
                        'spectra are plotted against the production values.')
    parser.add_argument("--diagdir", default=None,
                        help="If given, read intermediate diagnostic "
                        "results from this directory and include in plot(s)")
    parser.add_argument("--plotepochs", default=False, action="store_true",
                        help="Make diagnostic plots for each epoch")
    args = parser.parse_args(argv)

    # Read in data
    with open(args.configfile) as f:
        cfg = json.load(f)
    cubes = [read_datacube(os.path.join(args.dataprefix, fname), scale=False)
             for fname in cfg["filenames"]]

    # Ordered so per-step diagnostics plot in step order, "final" last.
    results = OrderedDict()

    # Diagnostic results at each step
    if args.diagdir is not None:
        fnames = sorted(glob.glob(os.path.join(args.diagdir, "step*.fits")))
        for fname in fnames:
            # key is the bare step name, e.g. "step4" from "step4.fits"
            name = os.path.basename(fname).split(".")[0]
            results[name] = read_results(fname)

    # Final result (don't fail if not available)
    if os.path.exists(args.resultfile):
        results["final"] = read_results(args.resultfile)

    # plot time series
    plot_timeseries(cubes, results, band=args.band,
                    fname=(args.outprefix + '_timeseries.png'))

    # Plot wave slices and sn, galaxy and sky spectra for all epochs.
    if 'final' in results and args.plotepochs:
        for i_t in range(len(cubes)):
            plot_epoch(cubes[i_t], results['final']['epochs'][i_t],
                       fname=(args.outprefix + '_epoch%02d.png' % i_t))

    # Plot result spectra against IDR spectra.
    if 'final' in results and args.idrfiles is not None:
        plot_sn(cfg['filenames'], results['final']['epochs']['sn'],
                results['final']['wave'], args.idrfiles,
                args.outprefix + '_sn.png')

    # Plot the x-y coordinates of the adr versus wavelength
    # (Skip this for now; contains no interesting information)
    #plot_adr(cubes, cubes[0].wave, fname=(args.outprefix + '_adr.png'))

    return 0
| snfactory/cubefit | cubefit/main.py | Python | mit | 26,267 |
module IncomeTax
  module Countries
    # Morocco's personal income tax, modelled as a progressive bracket scale.
    # Each +level+ call declares an upper income bound (in MAD) together with
    # the marginal rate applied within that bracket; +remainder+ is the rate
    # for income above the last bound. Bracket values per the DSL below —
    # verify against current Moroccan tax law before relying on them.
    class Morocco < Models::Progressive
      # Lookup names / ISO 3166 codes for this country.
      register 'Morocco', 'MA', 'MAR'
      currency 'MAD'

      level 30_000, '0%'
      level 50_000, '10%'
      level 60_000, '20%'
      level 80_000, '30%'
      level 180_000, '34%'
      remainder '38%'
    end
  end
end
| askl56/income-tax | lib/income_tax/countries/morocco.rb | Ruby | mit | 310 |
using System;

namespace sep20v1.Areas.HelpPage
{
    /// <summary>
    /// A preformatted text sample shown on the help page. Rendered via the
    /// display template named "TextSample". Instances are immutable and
    /// compare by their text content.
    /// </summary>
    public class TextSample
    {
        public TextSample(string text)
        {
            if (text == null)
            {
                throw new ArgumentNullException("text");
            }

            Text = text;
        }

        /// <summary>The sample text; never null.</summary>
        public string Text { get; private set; }

        public override bool Equals(object obj)
        {
            var sample = obj as TextSample;
            if (sample == null)
            {
                return false;
            }

            return Text == sample.Text;
        }

        public override int GetHashCode()
        {
            return Text.GetHashCode();
        }

        public override string ToString()
        {
            return Text;
        }
    }
}
<?php

namespace YB\Bundle\RemoteTranslationsBundle\Tests\Translation\Loader;

use PHPUnit_Framework_TestCase;

/**
 * Class AwsS3LoaderTest
 *
 * Placeholder test for the AwsS3Loader translation loader: currently it only
 * exercises the data-provider plumbing with an identity comparison. Real
 * loader assertions still need to be added — TODO confirm intended coverage.
 *
 * @package YB\Bundle\RemoteTranslationsBundle\Tests\Translation\Loader
 */
class AwsS3LoaderTest extends PHPUnit_Framework_TestCase
{
    /**
     * Asserts that each pair supplied by the provider is strictly identical.
     *
     * @param mixed $expected
     * @param mixed $result
     *
     * @dataProvider getExamples
     */
    public function testIndex($expected, $result)
    {
        $this->assertSame($expected, $result);
    }

    /**
     * Data provider for testIndex: yields [expected, result] pairs.
     *
     * @return \Generator
     */
    public function getExamples()
    {
        yield ['Lorem Ipsum', 'Lorem Ipsum'];
    }
}
| yurijbogdanov/symfony-remote-translations-bundle | Tests/Translation/Loader/AwsS3LoaderTest.php | PHP | mit | 636 |
package com.syncano.android.lib.modules.users;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.syncano.android.lib.modules.Params;
import com.syncano.android.lib.modules.Response;
/**
* Params to create new user.
*/
public class ParamsUserNew extends Params {
/** Name of user */
@Expose
@SerializedName(value = "user_name")
private String userName;
/** Nickname of user */
@Expose
private String nick;
/** Avatar base64 for user */
@Expose
private String avatar;
/** User's password. */
@Expose
@SerializedName(value = "password")
private String password;
/**
* @param userName
* User name defining user. Can be <code>null</code>.
*/
public ParamsUserNew(String userName) {
setUserName(userName);
}
@Override
public String getMethodName() {
return "user.new";
}
public Response instantiateResponse() {
return new ResponseUserNew();
}
/**
* @return user name
*/
public String getUserName() {
return userName;
}
/**
* Sets user name
*
* @param user_name
* user name
*/
public void setUserName(String userName) {
this.userName = userName;
}
/**
* @return user nickname
*/
public String getNick() {
return nick;
}
/**
* Sets user nickname
*
* @param nick
* nickname
*/
public void setNick(String nick) {
this.nick = nick;
}
/**
* @return avatar base64
*/
public String getAvatar() {
return avatar;
}
/**
* Sets avatar base64
*
* @param avatar
* avatar base64
*/
public void setAvatar(String avatar) {
this.avatar = avatar;
}
/**
* @return password
*/
public String getPassword() {
return password;
}
/**
* @param Sets
* user password
*/
public void setPassword(String password) {
this.password = password;
}
} | Syncano/syncano-android-demo | Eclipse/SyncanoLib/src/com/syncano/android/lib/modules/users/ParamsUserNew.java | Java | mit | 1,864 |
class BeachApiCore::TeamUpdate
  include Interactor

  # Applies context.params to the team held in the interactor context.
  # On success the context status becomes :ok; on failure the status is set
  # to :bad_request and the interactor fails with the validation messages.
  def call
    return context.status = :ok if context.team.update(context.params)

    context.status = :bad_request
    context.fail! message: context.team.errors.full_messages
  end
end
| beachio/beach-api-core | app/interactors/beach_api_core/team_update.rb | Ruby | mit | 259 |
# -*- coding: utf-8 -*-
def calc_note(count, value):
qnt = 0
if count >= value:
qnt = int(count) / value
print '%d nota(s) de R$ %d.00' % (qnt, value)
return count - qnt * value
# Read the monetary amount and decompose it into banknotes and coins
# (URI Online Judge problem 1021 output format).
n = float(raw_input())

print 'NOTAS:'
# Peel off each banknote denomination, largest first; calc_note prints the
# count and returns the remaining amount.
n = calc_note(n, 100)
n = calc_note(n, 50)
n = calc_note(n, 20)
n = calc_note(n, 10)
n = calc_note(n, 5)
n = calc_note(n, 2)

print 'MOEDAS:'
# Whole reais left over become 1.00 coins.
print '%d moeda(s) de R$ 1.00' % int(n)
n -= int(n)
# For each coin denomination: divide, print the (truncated) count, subtract.
m50 = n / 0.50
print '%d moeda(s) de R$ 0.50' % m50
n -= int(m50) * 0.50
m25 = n / 0.25
print '%d moeda(s) de R$ 0.25' % m25
n -= int(m25) * 0.25
m10 = n / 0.10
print '%d moeda(s) de R$ 0.10' % m10
n -= int(m10) * 0.10
# round() guards against binary floating-point error accumulated above.
if round(n, 2) >= 0.05:
    print '1 moeda(s) de R$ 0.05'
    m1 = (n - 0.05) * 100
else:
    print '0 moeda(s) de R$ 0.05'
    m1 = round(n, 2) * 100
# Whatever is left (in hundredths) is paid out in 0.01 coins.
if round(m1, 0):
    print '%.0f moeda(s) de R$ 0.01' % m1
else:
    print '0 moeda(s) de R$ 0.01'
| vicenteneto/online-judge-solutions | URI/1-Beginner/1021.py | Python | mit | 907 |
'use strict';
module.exports = function (grunt) {
var exec = require('child_process').exec;
grunt.registerMultiTask('install-dependencies', 'Installs npm dependencies.', function () {
var cb, options, cp;
cb = this.async();
options = this.options({
cwd: '',
stdout: true,
stderr: true,
failOnError: true,
isDevelopment: false
});
var cmd = "npm install";
if(!options.isDevelopment ) cmd += " -production";
cp = exec(cmd, {cwd: options.cwd}, function (err, stdout, stderr) {
if (err && options.failOnError) {
grunt.warn(err);
}
cb();
});
grunt.verbose.writeflags(options, 'Options');
if (options.stdout || grunt.option('verbose')) {
console.log("Running npm install in: " + options.cwd);
cp.stdout.pipe(process.stdout);
}
if (options.stderr || grunt.option('verbose')) {
cp.stderr.pipe(process.stderr);
}
});
};
| justhamade/grunt-install-dependencies | tasks/install-dependencies.js | JavaScript | mit | 949 |
<?php
// PHPUnit bootstrap: surface every error while the test suite runs.
ini_set('display_errors', 1);
ini_set('error_reporting', -1);
// Register the test namespace on Composer's autoloader (PSR-0 "add"),
// mapping ThisPageCannotBeFound\* onto this directory.
$loader = require __DIR__ . '/../vendor/autoload.php';
$loader->add('ThisPageCannotBeFound', __DIR__);
| thispagecannotbefound/php-signals | tests/bootstrap.php | PHP | mit | 173 |
<?php
/*
Safe sample
input : get the field userData from the variable $_GET via an object, which store it in a array
SANITIZE : use of preg_replace
construction : use of sprintf via a %s with simple quote
*/
/*Copyright 2015 Bertrand STIVALET
Permission is hereby granted, without written agreement or royalty fee, to
use, copy, modify, and distribute this software and its documentation for
any purpose, provided that the above copyright notice and the following
three paragraphs appear in all copies of this software.
IN NO EVENT SHALL AUTHORS BE LIABLE TO ANY PARTY FOR DIRECT,
INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF AUTHORS HAVE
BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
AUTHORS SPECIFICALLY DISCLAIM ANY WARRANTIES INCLUDING, BUT NOT
LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE, AND NON-INFRINGEMENT.
THE SOFTWARE IS PROVIDED ON AN "AS-IS" BASIS AND AUTHORS HAVE NO
OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
MODIFICATIONS.*/
// NOTE(review): this file is a generated vulnerability-test-suite sample
// ("safe" variant of CWE-95); the code itself is intentionally left as-is.
class Input{
    private $input;

    // Returns only the 'realOne' entry, i.e. the user-controlled value;
    // the surrounding 'test'/'trap' entries are decoys.
    public function getInput(){
        return $this->input['realOne'];
    }

    public function __construct(){
        $this->input = array();
        $this->input['test']= 'safe' ;
        $this->input['realOne']= $_GET['UserData'] ;
        $this->input['trap']= 'safe' ;
    }
}

$temp = new Input();
$tainted = $temp->getInput();

// SANITIZE: strip single quotes so the single-quoted sprintf context below
// cannot be broken out of.
$tainted = preg_replace('/\'/', '', $tainted);

$query = sprintf("echo $'%s';", $tainted);

$res = eval($query);
?>
# Plugin wiring, run via the Rails Dispatcher's to_prepare hook (re-executed
# on each reload in development).
ActionController::Dispatcher.to_prepare do
  # Mix the service-request association methods into the Account model.
  Account.send(:include, AccountServiceRequestAssociations)

  # Register ServiceRequest with the activity observer so its changes are tracked.
  ActivityObserver.instance.send :add_observer!, ServiceRequest

  # Expose this plugin's view helpers to all views.
  ActionView::Base.send(:include, ServiceRequestsHelper)
end

# Allow comments to be attached to service requests.
CommentsController.commentables = CommentsController.commentables + %w(service_request_id)
import { Component } from '@angular/core';

/**
 * Demo page for the Material chips component. All demo content lives in the
 * referenced template and stylesheet; the class itself holds no state.
 */
@Component({
  moduleId: module.id,
  selector: 'chips-demo',
  templateUrl: 'chips-demo.html',
  styleUrls: ['chips-demo.css']
})
export class ChipsDemoComponent {
}
| liuy97/angular2-material-seed | src/client/app/material/demo-app/chips/chips-demo.ts | TypeScript | mit | 211 |
using System;
using System.Collections.Generic;
using System.Text;

/// <summary>
/// Contract for types that expose a read-only string identifier.
/// </summary>
public interface IIdentifiable
{
    /// <summary>Gets the identifier of the implementing instance.</summary>
    string Id { get; }
}
| giggals/Software-University | Exercises-Interfaces/2. Multiple Implementation/IIdentifiable.cs | C# | mit | 131 |
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2015 - 2019 Dr. Marc Mültin (V2G Clarity)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*******************************************************************************/
package com.v2gclarity.risev2g.evcc.states;
import java.util.concurrent.TimeUnit;
import com.v2gclarity.risev2g.evcc.session.V2GCommunicationSessionEVCC;
import com.v2gclarity.risev2g.shared.enumerations.GlobalValues;
import com.v2gclarity.risev2g.shared.enumerations.V2GMessages;
import com.v2gclarity.risev2g.shared.messageHandling.ReactionToIncomingMessage;
import com.v2gclarity.risev2g.shared.messageHandling.TerminateSession;
import com.v2gclarity.risev2g.shared.misc.TimeRestrictions;
import com.v2gclarity.risev2g.shared.utils.SecurityUtils;
import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.AuthorizationReqType;
import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.AuthorizationResType;
import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.ChargeParameterDiscoveryReqType;
import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.EVSEProcessingType;
import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.PaymentOptionType;
import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.V2GMessage;
/**
 * EVCC client state that handles the SECC's {@code AuthorizationRes} message.
 *
 * <p>If the response reports {@code EVSEProcessing = FINISHED}, the ongoing
 * timer is cleared and a {@code ChargeParameterDiscoveryReq} is sent next.
 * While the response reports {@code ONGOING}, an empty
 * {@code AuthorizationReq} is re-sent (per requirement [V2G2-684]) until
 * either FINISHED arrives or V2G_EVCC_ONGOING_TIMEOUT is exceeded.</p>
 */
public class WaitForAuthorizationRes extends ClientState {

	public WaitForAuthorizationRes(V2GCommunicationSessionEVCC commSessionContext) {
		super(commSessionContext);
	}

	/**
	 * Processes an incoming AuthorizationRes and decides the next protocol
	 * step: send the next request, keep polling, or terminate the session.
	 *
	 * @param message the decoded incoming V2G message
	 * @return the reaction (next message to send or session termination)
	 */
	@Override
	public ReactionToIncomingMessage processIncomingMessage(Object message) {
		if (isIncomingMessageValid(message, AuthorizationResType.class)) {
			V2GMessage v2gMessageRes = (V2GMessage) message;
			AuthorizationResType authorizationRes =
					(AuthorizationResType) v2gMessageRes.getBody().getBodyElement().getValue();

			// EVSEProcessing is mandatory; a missing value is a protocol error.
			if (authorizationRes.getEVSEProcessing() == null)
				return new TerminateSession("EVSEProcessing parameter of AuthorizationRes is null. Parameter is mandatory.");

			if (authorizationRes.getEVSEProcessing().equals(EVSEProcessingType.FINISHED)) {
				getLogger().debug("EVSEProcessing was set to FINISHED");

				// Authorization done: reset and disarm the ongoing timer.
				getCommSessionContext().setOngoingTimer(0L);
				getCommSessionContext().setOngoingTimerActive(false);

				ChargeParameterDiscoveryReqType chargeParameterDiscoveryReq = getChargeParameterDiscoveryReq();

				/*
				 * Save this request in case the ChargeParameterDiscoveryRes indicates that the EVSE is
				 * still processing. Then this request can just be resent instead of asking the EV again.
				 */
				getCommSessionContext().setChargeParameterDiscoveryReq(chargeParameterDiscoveryReq);

				return getSendMessage(chargeParameterDiscoveryReq, V2GMessages.CHARGE_PARAMETER_DISCOVERY_RES);
			} else {
				getLogger().debug("EVSEProcessing was set to ONGOING");

				long elapsedTimeInMs = 0;

				// Track how long the EVSE has reported ONGOING; give up once
				// the overall ongoing timeout has been exceeded.
				if (getCommSessionContext().isOngoingTimerActive()) {
					long elapsedTime = System.nanoTime() - getCommSessionContext().getOngoingTimer();
					elapsedTimeInMs = TimeUnit.MILLISECONDS.convert(elapsedTime, TimeUnit.NANOSECONDS);

					if (elapsedTimeInMs > TimeRestrictions.V2G_EVCC_ONGOING_TIMEOUT)
						return new TerminateSession("Ongoing timer timed out for AuthorizationReq");
				} else {
					// First ONGOING response: start the timer now.
					getCommSessionContext().setOngoingTimer(System.nanoTime());
					getCommSessionContext().setOngoingTimerActive(true);
				}

				// [V2G2-684] demands to send an empty AuthorizationReq if the field EVSEProcessing is set to 'Ongoing'
				AuthorizationReqType authorizationReq = getAuthorizationReq(null);

				// Per-message timeout is the smaller of the remaining ongoing
				// budget and the regular AuthorizationRes timeout.
				return getSendMessage(authorizationReq, V2GMessages.AUTHORIZATION_RES, Math.min((TimeRestrictions.V2G_EVCC_ONGOING_TIMEOUT - (int) elapsedTimeInMs), TimeRestrictions.getV2gEvccMsgTimeout(V2GMessages.AUTHORIZATION_RES)));
			}
		} else {
			return new TerminateSession("Incoming message raised an error");
		}
	}
}
| V2GClarity/RISE-V2G | RISE-V2G-EVCC/src/main/java/com/v2gclarity/risev2g/evcc/states/WaitForAuthorizationRes.java | Java | mit | 4,963 |
<?php

declare(strict_types=1);

namespace Phpcq\Runner\Test\Console\Definition;

use Phpcq\Runner\Console\Definition\OptionDefinition;
use Phpcq\Runner\Console\Definition\OptionValue\OptionValueDefinition;
use PHPUnit\Framework\TestCase;

/** @covers \Phpcq\Runner\Console\Definition\OptionDefinition */
final class OptionDefinitionTest extends TestCase
{
    /** Verifies every constructor argument is exposed through its getter. */
    public function testDefinition(): void
    {
        $value  = $this->getMockForAbstractClass(OptionValueDefinition::class, [], '', false);
        $option = new OptionDefinition('foo', 'Full description', 'f', true, false, false, $value, '=');

        self::assertSame('foo', $option->getName());
        self::assertSame('Full description', $option->getDescription());
        self::assertSame('f', $option->getShortcut());
        self::assertTrue($option->isRequired());
        self::assertFalse($option->isArray());
        self::assertFalse($option->isOnlyShortcut());
        self::assertSame($value, $option->getOptionValue());
        self::assertSame('=', $option->getValueSeparator());
    }

    /** A shortcut-only option with no explicit shortcut falls back to its name. */
    public function testShortcutOnly(): void
    {
        $value  = $this->getMockForAbstractClass(OptionValueDefinition::class, [], '', false);
        $option = new OptionDefinition('foo', 'Full description', null, true, false, true, $value, '=');

        self::assertSame('foo', $option->getName());
        self::assertSame('Full description', $option->getDescription());
        self::assertSame('foo', $option->getShortcut());
        self::assertTrue($option->isRequired());
        self::assertFalse($option->isArray());
        self::assertTrue($option->isOnlyShortcut());
        self::assertSame($value, $option->getOptionValue());
        self::assertSame('=', $option->getValueSeparator());
    }
}
| phpcq/phpcq | tests/Console/Definition/OptionDefinitionTest.php | PHP | mit | 2,135 |
<?php
if (isset($user_data['id'])) {
?>
<script>
var $records_per_page = '<?php echo $this->security->get_csrf_hash(); ?>';
var page_url = '<?php echo base_url(); ?>';
var $user_data ="<?php echo $user_data['id']?>";
</script>
<script src="<?php echo base_url(); ?>assets/js/detail_pages/include/navbar.js"></script>
<?php
}
?>
<!--Main Menu File-->
<!--For Demo Only (Remove below css file and Javascript) -->
<div class="wsmenucontainer clearfix"></div>
<div class="wsmenucontent overlapblackbg "></div>
<div class="wsmenuexpandermain slideRight">
    <a id="navToggle" class="animated-arrow slideLeft "><span></span></a>
    <a href="<?php echo base_url(); ?>" class="smallogo"><img src="<?php echo base_url(); ?>assets/logos/logo-07.png" width="120" alt=""></a>
    <?php
    // Mobile-header notification bell: rendered only for logged-in users.
    // getnotify($id, 1) returns unread notifications only.
    if (!empty($user_data['id'])) {
        $notyfi_ = $this->M_notify->getnotify($user_data['id'], 1); ?>
    <div class="callusicon dropdown notifications">
        <a href="" class="dropdown-toggle" data-toggle="dropdown">
            <i class="fa fa-globe"></i>
            <?php if (count($notyfi_) != 0) {
                ?><span class="badge bg-lightred"><?php echo count($notyfi_)?></span><?php
            } ?>
        </a>
        <div class="dropdown-menu pull-right with-arrow panel panel-default animated littleFadeInLeft">
            <div class="panel-heading">
                You have <strong><?php echo count($notyfi_)?></strong> notifications unread
            </div>
            <ul class="list-group">
                <?php
                // Map each notification type to its landing URL.
                // (A duplicate, unreachable 'amper_register' elseif was removed here.)
                $notify = $this->M_notify->getnotify($user_data['id']);
        foreach ($notify as $row) {
            if ($row['type'] == 'invite') {
                $link = base_url().'chat/dashboard';
            } elseif ($row['type'] == 'amper_register') {
                $link = base_url().'amper/dashboard_affiliates';
            } elseif ($row['type'] == 'Invite tour') {
                $link = $row['active_url'];
            } else {
                $link = '#';
            }
            ?>
                <li class="list-group-item">
                    <a role="button" tabindex="0" class="media" href="<?=$link?>">
                        <div class="media-body">
                            <span class="block"><?php echo $row['messages']?></span>
                            <small class="text-muted"><?php echo $this->M_user->time_calculation($row['time'])?></small>
                        </div>
                    </a>
                </li>
                <?php
        } ?>
            </ul>
            <div class="panel-footer">
                <a href="<?=base_url('notifications/all')?>" role="button" tabindex="0">Show all notifications <i class="fa fa-angle-right pull-right"></i></a>
            </div>
        </div>
    </div>
    <?php
    }?>
</div>
<?php
    // Current controller/action segments; also reused by the "active" menu checks below.
    $params1 = $this->uri->segment(1);
    $params2 = $this->uri->segment(2);
    // Block tool pages for low-privilege roles (role > 2). Guard with isset()
    // so guests (no $user_data) no longer trigger an undefined-index warning;
    // the redirect outcome for logged-in users is unchanged.
    if (($params1 == 'mds' || ($params1 == 'artist' && $params2 == 'amp') || ($params1 == 'artist' && $params2 == 'managerrpk') || $params1 == 'chat' || $params1 == 'social_media' || $params1 == 'the_total_tour') && isset($user_data['role']) && $user_data['role'] > 2) {
        // NOTE(review): HTML has already been emitted above, so this header()
        // only redirects when output buffering is enabled — confirm the
        // CodeIgniter/php.ini output_buffering setting.
        header('Location: '.base_url());
        exit;
    }
?>
<?php // Header logo: artists (role 1) get a logo with a feature submenu on the homepage; everyone else gets a plain logo link. ?>
<div class="header">
    <div class="wrapper clearfix bigmegamenu">
        <?php if (isset($user_data['id']) && $user_data['role'] == 1) {
            ?>
        <nav class="slideLeft ">
            <ul class="mobile-sub wsmenu-list wsmenu-list-left_logo">
                <!--view login with account artists -->
                <li>
                    <a href="<?php echo base_url(); ?>" title=""><img src="<?php echo base_url(); ?>assets/logos/logo-07.png" alt="" /></a>
                    <?php
                    // Submenu only on the homepage ($params1 empty).
                    if ($params1 == null) {
                        ?>
                    <ul class="wsmenu-submenu" style="min-width: 160px;">
                        <li ><a href="<?php echo base_url("features/artist");?>"><i class="fa fa-arrow-circle-right"></i>Artist</a></li>
                        <li ><a href="<?php echo base_url("features/fan");?>"><i class="fa fa-arrow-circle-right"></i>Fan</a></li>
                        <li><a href="#worldwide"><i class="fa fa-arrow-circle-right"></i>Worldwide Featured Artist</a></li>
                        <li><a href="#local"><i class="fa fa-arrow-circle-right"></i>Local-Featured Artist</a></li>
                        <li><a href="<?php echo base_url("mds");?>"><i class="fa fa-arrow-circle-right"></i>Music Distribution System</a></li>
                        <li><a href="<?php echo base_url("features/artist#artist_landing");?>"><i class="fa fa-arrow-circle-right"></i>ALP</a></li>
                        <!--<li><a href="#epk"><i class="fa fa-arrow-circle-right"></i>Electronic Press Kit</a></li>-->
                        <li><a href="<?php echo base_url("features/artist#ttt");?>"><i class="fa fa-arrow-circle-right"></i>The Total Tour</a></li>
                        <li><a href="<?php echo base_url("make_money");?>"><i class="fa fa-arrow-circle-right"></i>Artist Music Player</a></li>
                        <li><a href="<?php echo base_url("features/artist#social_media");?>"><i class="fa fa-arrow-circle-right"></i>One Stop Social Media</a></li>
                        <li><a href="<?php echo base_url("features/artist#gigs_events");?>"><i class="fa fa-arrow-circle-right"></i>Gigs & Events</a></li>
                        <!--<li><a href="#dashboard"><i class="fa fa-arrow-circle-right"></i>Dashboard Chat</a></li>-->
                        <li><a href="<?php echo base_url("features/artist#music_referral");?>"><i class="fa fa-arrow-circle-right"></i>Musicians Referral</a></li>
                    </ul>
                    <?php
                    } ?>
                </li>
            </ul>
        </nav>
        <?php
        } else {
            ?>
        <div class="logo"><a href="<?php echo base_url(); ?>" title=""><img src="<?php echo base_url(); ?>assets/logos/logo-07.png" alt="" /></a></div>
        <?php
        }?>
<!--Main Menu HTML Code-->
<?php // Left menu, branch 1 of 4: site ADMIN (role 1 + is_admin 1). Each item toggles 'active'/'activesub' from $params1/$params2. ?>
<nav class="wsmenu slideLeft ">
    <ul class="mobile-sub wsmenu-list wsmenu-list-left">
        <?php if (isset($user_data)) {
            $check_upgrade = $this->M_user->check_upgrade($user_data['id']);
            if (isset($user_data['id']) && $user_data['role'] == 1 && $user_data['is_admin'] == 1) {
                ?>
        <!--view login with account ADMIN -->
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'blogs' || $params1 == 'gigs_events' || $params1 == 'find-a-musician' || $params1 == 'find-a-fan' || $params1 == 'find-an-artist' || $params2 == 'find-a-fan' || $params2 == 'find-an-artist' || $params2 == 'world_wide_featured') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Artists & Fans<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'blogs') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('blogs') ?>"><i class="fa fa-arrow-circle-right"></i>Blogs</a></li>
                <li><a <?php if ($params1 == 'gigs_events') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('gigs_events') ?>"><i class="fa fa-arrow-circle-right"></i>Book A Show</a></li>
                <li><a <?php if ($params1 == 'find-a-musician') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('find-a-musician') ?>"><i class="fa fa-arrow-circle-right"></i>Musicians Referral</a></li>
                <li><a <?php if ($params2 == 'find-a-fan') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/find-a-fan') ?>"><i class="fa fa-arrow-circle-right"></i>Find A Fan</a></li>
                <li><a <?php if ($params2 == 'find-an-artist') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/find-an-artist') ?>"><i class="fa fa-arrow-circle-right"></i>Find AN Artist</a></li>
            </ul>
        </li>
        <li>
            <?php // NOTE(review): 'new-treding' below looks like a typo for 'new-trending', so that active check can never match — confirm intended URL. ?>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'new-treding' || $params1 == 'hot_video_picks' || $params1 == 'fancapture' || $params2 == 'hot_video_picks' || $params2 == 'new-trending') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Our Artist's Music <span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'fancapture') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('fancapture') ?>"><i class="fa fa-arrow-circle-right"></i>Meet Our Artist</a></li>
                <!--<li><a <?php if ($params2 == 'new-trending') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/new-trending') ?>"><i class="fa fa-arrow-circle-right"></i>New & Trending</a></li>-->
                <li><a <?php if ($params2 == 'world_wide_featured') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('world_wide_featured') ?>"><i class="fa fa-arrow-circle-right"></i>World Wide Featured Artist</a></li>
                <li><a <?php if ($params2 == 'hot_video_picks') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/hot_video_picks') ?>"><i class="fa fa-arrow-circle-right"></i>Hot Video Picks</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'artists' || $params1 == 'make_money' || $params1 == 'top-100-list' || $params1 == 'fancapture') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Earn Money<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'artists') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('artists') ?>"><i class="fa fa-arrow-circle-right"></i>Create AMP-Video</a></li>
                <li <?php if ($params1 == 'make_money') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url(); ?>make_money"><i class="fa fa-arrow-circle-right"></i>Artist Music Player</a></li>
                <!--<li <?php if ($params1 == '#') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('#') ?>"><i class="fa fa-arrow-circle-right"></i>How to Earn Money</a></li>
                <li <?php if ($params1 == '#') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url(); ?>#"><i class="fa fa-arrow-circle-right"></i>Signup - AMP</a></li>-->
                <li <?php if ($params1 == 'fancapture') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('fancapture') ?>"><i class="fa fa-arrow-circle-right"></i>Fan Capture</a></li>
                <li><a <?php if ($params1 == 'top-100-list') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('top-100-list') ?>"><i class="fa fa-arrow-circle-right"></i>Top 100 Fans - Amp Sales</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span>
            <a <?php if ($params1 == 'mds' || ($params1 == 'artist' && $params2 == 'amp') || ($params1 == 'artist' && $params2 == 'dashboard_epk') || $params1 == 'chat' || $params1 == 'social_media' || $params1 == 'the_total_tour') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-hand-pointer-o"></i> Tool<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li><a <?php if ($params1 == 'the_total_tour') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('the_total_tour') ?>"><i class="fa fa-arrow-circle-right"></i>The Total Tour</a></li>
                <li><a <?php if ($params1 == 'mds') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('mds') ?>"><i class="fa fa-arrow-circle-right"></i>MDS</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'dashboard_epk') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/dashboard_epk')?>"><i class="fa fa-arrow-circle-right"> </i>EPK</a></li>
                <li><a <?php if ($params1 == 'chat') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('chat/dashboard') ?>"><i class="fa fa-arrow-circle-right"> </i>Dashboard Chat</a></li>
                <li><a <?php if ($params1 == 'social_media') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('social_media') ?>"><i class="fa fa-arrow-circle-right"></i>Social media</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if (($params1 == 'artist' && $params2 != 'amp') && $params2 != 'dashboard_epk' && $params2 != 'profile') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-heartbeat"></i> Dashboard<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managersong') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managersong') ?>"><i class="fa fa-arrow-circle-right"></i>Songs</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managervideo') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managervideo') ?>"><i class="fa fa-arrow-circle-right"></i>Videos</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managerphoto') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managerphoto') ?>"><i class="fa fa-arrow-circle-right"></i>Photos</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'manager-comment') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/manager-comment')?>"><i class="fa fa-arrow-circle-right"></i>Comments</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managerpress') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managerpress') ?>"><i class="fa fa-arrow-circle-right"></i>Press</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'blogsmanager') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/blogsmanager') ?>"><i class="fa fa-arrow-circle-right"></i>Blogs</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'basic_info') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/basic_info'); ?>"><i class="fa fa-arrow-circle-right"></i>Customize Profile</a></li>
            </ul>
        </li>
        <li class="top120">
            <span class="wsmenu-click"></span><a <?php if ($params2 == 'stop_typing' || $params1 == 'hot_video_picks') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-align-justify"></i> Social Media<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <!-- TODO: <li <?php if ($params2 == 'stop_typing') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('social_media/stop_typing') ?>"><i class="fa fa-arrow-circle-right"></i>1 stop typing</a></li> -->
                <li <?php if ($params1 == 'hot_video_picks') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/hot_video_picks') ?>"><i class="fa fa-arrow-circle-right"></i>Top 100 Fans - Amp Sales</a></li>
            </ul>
        </li>
        <li class="top120">
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'new_artist') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Music Pages<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'new_artist') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('new_artist') ?>"><i class="fa fa-arrow-circle-right"></i>New Artist</a></li>
            </ul>
        </li>
        <li class="top120">
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'features') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-align-justify"></i> Features<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params2 == 'fan') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/fan') ?>"><i class="fa fa-arrow-circle-right"></i>Fan Feature</a></li>
                <li <?php if ($params2 == 'artist') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/artist') ?>"><i class="fa fa-arrow-circle-right"></i>Artist Feature</a></li>
                <li <?php if ($params2 == 'fan_feature') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/fan_feature') ?>"><i class="fa fa-arrow-circle-right"></i>Fan Features</a>
                </li>
            </ul>
        </li>
        <li class="top120">
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'local-featured') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Meet Our Artists<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li><a <?php if ($params1 == 'local-featured') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('local-featured') ?>"><i class="fa fa-arrow-circle-right"></i>Local Featured Artist</a></li>
            </ul>
        </li>
        <!--
        <li class="top120">
            <li <?php if ($params1 == 'gigs_events') {
                echo 'class="activesub"';
            } ?>><a href="<?php echo base_url('gigs_events') ?>"><i class="fa fa-music"></i> SHOWs</a></li>
        </li>-->
        <li class="top120">
            <li <?php if ($params1 == '#') {
                echo 'class="activesub"';
            } ?>><a href="<?php echo base_url('#') ?>"><i class="fa fa-search"></i> Search</a>
        </li>
        <?php
            // Branch 2 of 4: ARTIST (role 1, not admin).
            } elseif (isset($user_data['id']) && $user_data['role'] == 1) {
                ?>
        <!--view login with account artists -->
        <li>
            <span class="wsmenu-click"></span><a <?php if (($params1 == 'artist' && $params2 == 'showgigs') || $params1 == 'blogs' || $params1 == 'gigs_events' || $params1 == 'find-a-musician' || $params1 == 'find-a-fan' || $params1 == 'find-an-artist' || $params2 == 'find-a-fan' || $params2 == 'find-an-artist'|| $params2 == 'world_wide_featured') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Artists & Fans<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'blogs') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('blogs') ?>"><i class="fa fa-arrow-circle-right"></i>Blogs</a></li>
                <li><a <?php if ($params1 == 'gigs_events') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('gigs_events') ?>"><i class="fa fa-arrow-circle-right"></i>Book A Show</a></li>
                <li><a <?php if ($params1 == 'find-a-musician') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('find-a-musician') ?>"><i class="fa fa-arrow-circle-right"></i>Musicians Referral</a></li>
                <li><a <?php if ($params2 == 'find-a-fan') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/find-a-fan') ?>"><i class="fa fa-arrow-circle-right"></i>Find A Fan</a></li>
                <li><a <?php if ($params2 == 'find-an-artist') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/find-an-artist') ?>"><i class="fa fa-arrow-circle-right"></i>Find AN Artist</a></li>
                <!--<li><a <?php if ($params2 == 'showgigs') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/my_location') ?>"><i class="fa fa-arrow-circle-right"></i>Find AN Location</a></li>-->
                <li><a <?php if ($params2 == 'showgigs') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/showgigs') ?>"><i class="fa fa-arrow-circle-right"></i>Gig Finder</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'new-treding' || $params1 == 'hot_video_picks' || $params1 == 'fancapture' || $params2 == 'hot_video_picks' || $params2 == 'new-trending') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Our Artist's Music <span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'fancapture') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('fancapture') ?>"><i class="fa fa-arrow-circle-right"></i>Meet Our Artist</a></li>
                <!--<li><a <?php if ($params2 == 'new-trending') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/new-trending') ?>"><i class="fa fa-arrow-circle-right"></i>New & Trending</a></li>-->
                <li><a <?php if ($params2 == 'world_wide_featured') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('world_wide_featured') ?>"><i class="fa fa-arrow-circle-right"></i>World Wide Featured Artist</a></li>
                <li><a <?php if ($params2 == 'hot_video_picks') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/hot_video_picks') ?>"><i class="fa fa-arrow-circle-right"></i>Hot Video Picks</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'artists' || $params1 == 'make_money' || $params1 == 'top-100-list') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Earn Money<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'make_money') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('make_money') ?>"><i class="fa fa-arrow-circle-right"></i>How to Earn Money</a></li>
                <!--<li <?php if ($params1 == '#') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('#'); ?>"><i class="fa fa-arrow-circle-right"></i>Signup - AMP</a></li>-->
                <li <?php if ($params1 == 'fancapture') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('fancapture') ?>"><i class="fa fa-arrow-circle-right"></i>FCP</a></li>
                <li><a <?php if ($params1 == 'top-100-list') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('top-100-list') ?>"><i class="fa fa-arrow-circle-right"></i>Top 100 Fans - Amp Sales</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span>
            <a <?php if ($params1 == 'mds' || ($params1 == 'artist' && $params2 == 'amp') || ($params1 == 'artist' && $params2 == 'dashboard_epk') || $params1 == 'chat' || $params1 == 'social_media' || $params1 == 'the_total_tour') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-hand-pointer-o"></i> Tool<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li><a <?php if ($params1 == 'the_total_tour') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('the_total_tour') ?>"><i class="fa fa-arrow-circle-right"></i>The Total Tour</a></li>
                <li><a <?php if ($params1 == 'mds') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('mds') ?>"><i class="fa fa-arrow-circle-right"></i>MDS</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'dashboard_epk') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/dashboard_epk')?>"><i class="fa fa-arrow-circle-right"> </i>EPK</a></li>
                <li><a <?php if ($params1 == 'chat') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('chat/dashboard') ?>"><i class="fa fa-arrow-circle-right"> </i>Dashboard Chat</a></li>
                <li><a <?php if ($params1 == 'social_media') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('social_media') ?>"><i class="fa fa-arrow-circle-right"></i>Social media</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if (($params1 == 'artist' && $params2 != 'amp' && $params2 != 'showgigs' && $params2 != 'dashboard_epk') && $params2 != 'managerrpk' && $params2 != 'profile') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-heartbeat"></i> Dashboard<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managersong') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managersong') ?>"><i class="fa fa-arrow-circle-right"></i>Songs</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managervideo') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managervideo') ?>"><i class="fa fa-arrow-circle-right"></i>Videos</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managerphoto') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managerphoto') ?>"><i class="fa fa-arrow-circle-right"></i>Photos</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'manager-comment') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/manager-comment')?>"><i class="fa fa-arrow-circle-right"></i>Comments</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'managerpress') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/managerpress') ?>"><i class="fa fa-arrow-circle-right"></i>Press</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'blogsmanager') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/blogsmanager') ?>"><i class="fa fa-arrow-circle-right"></i>Blogs</a></li>
                <li><a <?php if ($params1 == 'artist' && $params2 == 'basic_info') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('artist/basic_info'); ?>"><i class="fa fa-arrow-circle-right"></i>Customize Profile</a></li>
            </ul>
        </li>
        <?php
            // Branch 3 of 4: FAN (role 2).
            } elseif (isset($user_data['id']) && $user_data['role'] == 2) {
                ?>
        <!--view login with account fans -->
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params2 == 'fan_feature') {
                echo 'class="active"';
            } ?> href="<?php echo base_url('features/fan_feature') ?>">Fan Features</a>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params2 == 'stop_typing' || $params1 == 'top-100-list') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-align-justify"></i> Social Media<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <!-- TODO: <li <?php if ($params2 == 'stop_typing') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('social_media/stop_typing') ?>"><i class="fa fa-arrow-circle-right"></i>1 stop typing</a></li> -->
                <li <?php if ($params1 == 'top-100-list') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('top-100-list') ?>"><i class="fa fa-arrow-circle-right"></i>Top 100 Fans - Amp Sales</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'findamusician' || $params1 == 'artists' || $params1 == 'make_money' || $params1 == 'top-100-list') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Earn Money<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'artists') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('artists') ?>"><i class="fa fa-arrow-circle-right"></i>Create AMP-Video</a></li>
                <li <?php if ($params1 == 'make_money') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url(); ?>make_money"><i class="fa fa-arrow-circle-right"></i>Artist Music Player</a></li>
                <li <?php if ($params1 == 'top-100-list') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('top-100-list') ?>"><i class="fa fa-arrow-circle-right"></i>Top 100 Fans - Amp Sales</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'fancapture' || $params2 == 'hot_video_picks' || $params1 == 'new-treding') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Music Pages<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'fancapture') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('fancapture') ?>"><i class="fa fa-arrow-circle-right"></i>Meet Our Artist</a></li>
                <li><a <?php if ($params2 == 'hot_video_picks') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('features/hot_video_picks') ?>"><i class="fa fa-arrow-circle-right"></i>Hot Video Picks</a></li>
                <li <?php if ($params1 == 'features/new-trending') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/new-trending') ?>"><i class="fa fa-arrow-circle-right"></i>Trending Artist</a></li>
                <li><a <?php if ($params2 == 'world_wide_featured') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('world_wide_featured') ?>"><i class="fa fa-arrow-circle-right"></i>World Wide Featured Artist</a></li>
                <li <?php if ($params1 == 'new_artist') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('new_artist') ?>"><i class="fa fa-arrow-circle-right"></i>New Artist</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'amp' && $params2 == $user_data['home_page']) {
                echo 'class="active"';
            } ?> href="<?php echo base_url('amp/'.$user_data['home_page']) ?>"><i class="fa fa-music"></i> Fan Landing</a>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'find-a-show') {
                echo 'class="active"';
            } ?> href="<?php echo base_url('find-a-show') ?>"><i class="fa fa-music"></i> Find A Show</a>
        </li>
        <?php
        }//end account Fan
        ?>
<?php // Branch 4 of 4: GUEST (no $user_data set). ?>
        <?php
        } else {
            ?>
        <!-- before login -->
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'features' && ($params2 != 'hot_video_picks')) {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-align-justify"></i> Features<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params2 == 'fan') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/fan') ?>"><i class="fa fa-arrow-circle-right"></i>Fan Feature</a></li>
                <li <?php if ($params2 == 'artist') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/artist') ?>"><i class="fa fa-arrow-circle-right"></i>Artist Feature</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'local-featured' || $params2 == 'hot_video_picks') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Meet Our Artists<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params2 == 'hot_video_picks') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('features/hot_video_picks') ?>"><i class="fa fa-arrow-circle-right"></i>Hot Video Picks</a></li>
                <li><a <?php if ($params1 == 'local-featured') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('local-featured') ?>"><i class="fa fa-arrow-circle-right"></i>Local Featured Artist</a></li>
                <li><a <?php if ($params2 == 'world_wide_featured') {
                    echo 'class="activesub"';
                } ?> href="<?php echo base_url('world_wide_featured') ?>"><i class="fa fa-arrow-circle-right"></i>World Wide Featured Artist</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'findamusician' || $params1 == 'artists' || $params1 == 'make_money' || $params1 == 'top-100-list') {
                echo 'class="active"';
            } ?> href="#"><i class="fa fa-music"></i> Earn Money<span class="arrow"></span></a>
            <ul class="wsmenu-submenu" style="min-width: 160px;">
                <li <?php if ($params1 == 'artists') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('artists') ?>"><i class="fa fa-arrow-circle-right"></i>Create AMP-Video</a></li>
                <li <?php if ($params1 == 'make_money') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url(); ?>make_money"><i class="fa fa-arrow-circle-right"></i>Artist Music Player</a></li>
                <li <?php if ($params1 == 'top-100-list') {
                    echo 'class="activesub"';
                } ?>><a href="<?php echo base_url('top-100-list') ?>"><i class="fa fa-arrow-circle-right"></i>Top 100 Fans - Amp Sales</a></li>
            </ul>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == 'gigs_events') {
                echo 'class="active"';
            } ?> href="<?php echo base_url('gigs_events') ?>"><i class="fa fa-music"></i> SHOWs</a>
        </li>
        <li>
            <span class="wsmenu-click"></span><a <?php if ($params1 == '#') {
                echo 'class="active"';
            } ?> href="<?php echo base_url('#') ?>"><i class="fa fa-music"></i> Search</a>
        </li>
        <?php
        }?>
        </ul>
        <?php // Right-hand menu: avatar dropdown (admin/artist/fan variants) or Join/Login for guests. ?>
        <ul class="mobile-sub wsmenu-list wsmenu-list-right">
            <?php
            if (isset($user_data)) {
                ?>
            <li>
                <span class="wsmenu-click"></span><a <?php if (($params1 == 'account' && $params2 == 'credit') || ($params1 == 'subscriptions' && $params2 == 'upgrade') || ($params1 == 'artist' && $params2 == 'profile')) {
                    echo 'class="active"';
                }
                // Avatar lookup differs per role: artists use get_avata, fans get_avata_flp.
                if($user_data['role'] == 1)
                {
                    $image_url = $this->M_user->get_avata($user_data['id']);
                }
                else{
                    $image_url = $this->M_user->get_avata_flp($user_data['id']);
                }
                ?> href="#"><img src="<?php echo $image_url?>" width="30"/> <span><?php echo $this->M_user->get_name($user_data['id'])?></span><span class="arrow"></span></a>
                <ul class="wsmenu-submenu responsive_menu" style="min-width: 160px;">
                    <?php
                    if ($user_data['role'] == 1) {
                        if ($user_data['is_admin'] != 0) {
                            ?>
                    <li><a href="<?php echo base_url('admin/dashboard') ?>"><i class="fa fa-tachometer"></i>Admin Dashboard</a></li>
                    <?php
                        } ?>
                    <li><a <?php if ($params1 == 'artist' && $params2 == 'profile') {
                        echo 'class="activesub"';
                    } ?> href="<?php echo base_url('artist/profile') ?>"><i class="fa fa-tachometer"></i> Create Profile</a></li>
                    <li><a <?php if ($params1 == 'amper' && $params2 == 'dashboard') {
                        echo 'class="activesub"';
                    } ?>href="<?php echo base_url('amper/dashboard') ?>"><i class="fa fa-arrow-circle-right"></i>Music-Player Dashboard</a></li>
                    <?php
                    // $check_upgrade (set where the left menu opens) gates the billing link.
                    if ($check_upgrade) {
                        ?><li><a <?php if ($params1 == 'subscriptions' && $params2 == 'subscriptions_plan') {
                            echo 'class="activesub"';
                        } ?> href="<?php echo base_url('subscriptions/subscriptions_plan') ?>"><i class="fa fa-tachometer"></i> Subscriptions & Billing</a></li>
                    <?php
                    } ?>
                    <!--<li><a <?php if ($params1 == 'subscriptions' && $params2 == 'upgrade') {
                        echo 'class="activesub"';
                    } ?> href="<?php echo base_url('subscriptions/upgrade') ?>"><i class="fa fa-tachometer"></i> Upgrade Subscriptions </a></li>
                    <li><a <?php if ($params1 == 'subscriptions' && $params2 == 'featured') {
                        echo 'class="activesub"';
                    } ?> href="<?php echo base_url('subscriptions/featured') ?>"><i class="fa fa-tachometer"></i> Upgrade Subscription – Homepage Placement – Get Fans, Get Noticed! </a></li>-->
                    <li><a href="<?php echo base_url('account/logout') ?>"><i class="fa fa-sign-out"></i>Logout</a></li>
                    <?php
                    } elseif ($user_data['role'] == 2) {
                        ?>
                    <li><a <?php if ($params1 == 'amper' && $params2 == 'dashboard') {
                        echo 'class="activesub"';
                    } ?>href="<?php echo base_url('amper/dashboard') ?>"><i class="fa fa-arrow-circle-right"></i>AMPER Dashboard</a></li>
                    <?php
                    if ($check_upgrade) {
                        ?><li><a <?php if ($params1 == 'subscriptions' && $params2 == 'subscriptions_plan') {
                            echo 'class="activesub"';
                        } ?> href="<?php echo base_url('subscriptions/subscriptions_plan') ?>"><i class="fa fa-tachometer"></i> Subscriptions/Billing </a></li>
                    <?php
                    } else {
                        ?><li><a <?php if ($params1 == 'subscriptions' && $params2 == 'upgrade') {
                            echo 'class="activesub"';
                        } ?> href="<?php echo base_url('subscriptions/upgrade') ?>"><i class="fa fa-tachometer"></i> Upgrade Subscriptions </a></li>
                    <?php
                    } ?>
                    <li><a href="<?php echo base_url('chat/dashboard') ?>"><i class="fa fa-arrow-circle-right"></i>Dashboard Chat</a></li>
                    <li><a href="<?php echo base_url('account/logout') ?>"><i class="fa fa-sign-out"></i> Logout</a></li>
                    <?php
                    } else {
                        ?>
                    <li><a href="<?php echo base_url('account/logout') ?>"><i class="fa fa-sign-out"></i> Logout</a></li>
                    <?php
                    } ?>
                </ul>
            </li>
            <?php
            } else {
                ?>
            <li><a <?php if ($params1 == 'account' && $params2 == 'signup') {
                echo 'class="active"';
            } ?> href="<?php echo base_url('account/signup') ?>"><i class="fa fa-user-plus"></i> Join</a></li>
            <li><a <?php if ($params1 == 'account' && $params2 == 'login') {
                echo 'class="active"';
            } ?> href="<?php echo base_url('account/login') ?>"><i class="fa fa-sign-in"></i> Login</a></li>
            <?php
            }
            ?>
<?php if (!empty($user_data['id'])) {
$notyfi_ = $this->M_notify->getnotify($user_data['id'], 1); ?>
<li class=" dropdown notifications noti2 ">
<a href="" class="dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-globe" style="font-size:2em;display: block!important"></i>
<?php if (count($notyfi_) != 0) {
?><span class="badge bg-lightred"><?php echo count($notyfi_)?></span><?php
} ?>
</a>
<div class="dropdown-menu pull-right with-arrow panel panel-default animated littleFadeInLeft">
<div class="panel-heading">
You have <strong><?php echo count($notyfi_)?></strong> notifications unread
</div>
<ul class="list-group">
<?php
$notify = $this->M_notify->getnotify($user_data['id']);
foreach ($notify as $row) {
if ($row['type'] == 'invite') {
$link = base_url().'chat/dashboard';
} elseif ($row['type'] == 'amper_register') {
$link = base_url().'amper/dashboard_affiliates';
} elseif ($row['type'] == 'amper_register') {
$link = base_url().'amper/dashboard_affiliates';
} elseif ($row['type'] == 'Invite tour') {
$link = $row['active_url'];
} else {
$link = '#';
} ?>
<li class="list-group-item">
<a role="button" tabindex="0" class="media" href="<?=$link?>">
<div class="media-body">
<span class="block"><?php echo $row['messages']?></span>
<small class="text-muted"><?php echo $this->M_user->time_calculation($row['time'])?></small>
</div>
</a>
</li>
<?php
} ?>
</ul>
<div class="panel-footer">
<a href="<?=base_url('notifications/all')?>" role="button" tabindex="0">Show all notifications <i class="fa fa-angle-right pull-right"></i></a>
</div>
</div>
</li>
<?php
}?>
        </ul>
    </nav>
    <!--Menu HTML Code-->
    </div>
</div>
<?php // Spacer pushing page content below the fixed header. ?>
<div class="padingheader-top"></div>
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Copyright (c) 2013 The NovaCoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "txdb.h"
#include "miner.h"
#include "kernel.h"
using namespace std;
//////////////////////////////////////////////////////////////////////////////
//
// BitcoinMiner
//
extern unsigned int nMinerSleep;
// Lay out SHA-256 message padding over the caller's buffer: zero-fill past
// `len`, append the 0x80 terminator byte, and write the message length in
// bits (big-endian) into the last four bytes of the final 64-byte chunk.
// Returns the number of 64-byte chunks the padded message occupies.
// The caller must supply a buffer large enough for the padded size.
int static FormatHashBlocks(void* pbuffer, unsigned int len)
{
    unsigned char* data = (unsigned char*)pbuffer;
    const unsigned int nBlocks = 1 + ((len + 8) / 64);
    unsigned char* tail = data + 64 * nBlocks;
    // Clear everything after the payload, then mark end-of-message.
    memset(data + len, 0, 64 * nBlocks - len);
    data[len] = 0x80;
    // Append the bit length, most significant byte last-to-first.
    const unsigned int nBits = len * 8;
    tail[-1] = (nBits >> 0) & 0xff;
    tail[-2] = (nBits >> 8) & 0xff;
    tail[-3] = (nBits >> 16) & 0xff;
    tail[-4] = (nBits >> 24) & 0xff;
    return nBlocks;
}
static const unsigned int pSHA256InitState[8] =
{0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19};
// Run one SHA-256 compression step: hash a single 64-byte input chunk
// (pinput) on top of a caller-supplied midstate (pinit) and write the
// resulting 8-word state to pstate. Reaches into OpenSSL's SHA256_CTX
// internals (ctx.h) to seed the midstate, bypassing the normal
// Init/Update/Final contract — fragile against OpenSSL layout changes.
void SHA256Transform(void* pstate, void* pinput, const void* pinit)
{
    SHA256_CTX ctx;
    unsigned char data[64];
    SHA256_Init(&ctx);
    // Byte-swap input words into the big-endian order SHA-256 expects.
    for (int i = 0; i < 16; i++)
        ((uint32_t*)data)[i] = ByteReverse(((uint32_t*)pinput)[i]);
    // Replace the standard initial constants with the caller's midstate.
    for (int i = 0; i < 8; i++)
        ctx.h[i] = ((uint32_t*)pinit)[i];
    SHA256_Update(&ctx, data, sizeof(data));
    // Export the compressed state without finalizing (no padding round).
    for (int i = 0; i < 8; i++)
        ((uint32_t*)pstate)[i] = ctx.h[i];
}
// COrphan tracks a mempool transaction that cannot go into the block yet
// because it spends outputs of other transactions still in the mempool.
// Once every parent listed in setDependsOn has been included, the
// transaction is released into the priority queue (see CreateNewBlock).
class COrphan
{
public:
    CTransaction* ptx;          // dependent transaction (owned by the mempool)
    set<uint256> setDependsOn;  // hashes of in-mempool parents not yet included
    double dPriority;           // cached priority: sum(valueIn * age) / txsize
    double dFeePerKb;           // cached fee rate used for fee-ordered selection
    COrphan(CTransaction* ptxIn)
    {
        ptx = ptxIn;
        dPriority = dFeePerKb = 0;
    }
    // Debug dump of the orphan and its outstanding dependencies.
    void print() const
    {
        printf("COrphan(hash=%s, dPriority=%.1f, dFeePerKb=%.1f)\n",
               ptx->GetHash().ToString().substr(0,10).c_str(), dPriority, dFeePerKb);
        BOOST_FOREACH(uint256 hash, setDependsOn)
            printf("   setDependsOn %s\n", hash.ToString().substr(0,10).c_str());
    }
};
uint64_t nLastBlockTx = 0;
uint64_t nLastBlockSize = 0;
int64_t nLastCoinStakeSearchInterval = 0;
// We want to sort transactions by priority and fee, so:
typedef boost::tuple<double, double, CTransaction*> TxPriority;
// Strict-weak ordering over TxPriority tuples (priority, feePerKb, tx*).
// With byFee unset the primary key is priority and fee breaks ties;
// with byFee set the roles are swapped. Lower keys sort first, which
// makes std::*_heap treat the highest-valued transaction as the top.
class TxPriorityCompare
{
    bool byFee;
public:
    TxPriorityCompare(bool _byFee) : byFee(_byFee) { }
    bool operator()(const TxPriority& a, const TxPriority& b)
    {
        // Select primary/secondary keys according to the mode, then
        // compare primary first and fall back to the secondary on a tie.
        const double aPrimary = byFee ? a.get<1>() : a.get<0>();
        const double bPrimary = byFee ? b.get<1>() : b.get<0>();
        if (aPrimary != bPrimary)
            return aPrimary < bPrimary;
        const double aSecondary = byFee ? a.get<0>() : a.get<1>();
        const double bSecondary = byFee ? b.get<0>() : b.get<1>();
        return aSecondary < bSecondary;
    }
};
// CreateNewBlock: create new block (without proof-of-work/proof-of-stake)
// Assembles a candidate block on top of the current best chain tip:
//   - builds the coinbase (PoW: pays a fresh wallet key; PoS: empty vout[0]
//     left for the coinstake to replace later),
//   - selects mempool transactions ordered by priority then fee rate,
//     respecting block-size, sigop, timestamp and minimum-fee limits,
//   - resolves in-mempool dependency chains via COrphan bookkeeping,
//   - fills in the header fields (nBits, nTime, hashPrevBlock; nNonce = 0).
// Returns a heap-allocated block owned by the caller, or NULL on failure.
// If pFees is non-NULL it receives the total fees collected.
CBlock* CreateNewBlock(CWallet* pwallet, bool fProofOfStake, int64_t* pFees)
{
    // Create new block
    auto_ptr<CBlock> pblock(new CBlock());
    if (!pblock.get())
        return NULL;
    CBlockIndex* pindexPrev = pindexBest;
    // Create coinbase tx
    CTransaction txNew;
    txNew.vin.resize(1);
    txNew.vin[0].prevout.SetNull();
    txNew.vout.resize(1);
    if (!fProofOfStake)
    {
        // PoW: pay the block reward to a fresh key reserved from the wallet.
        CReserveKey reservekey(pwallet);
        CPubKey pubkey;
        if (!reservekey.GetReservedKey(pubkey))
            return NULL;
        txNew.vout[0].scriptPubKey.SetDestination(pubkey.GetID());
    }
    else
    {
        // Height first in coinbase required for block.version=2
        txNew.vin[0].scriptSig = (CScript() << pindexPrev->nHeight+1) + COINBASE_FLAGS;
        assert(txNew.vin[0].scriptSig.size() <= 100);
        txNew.vout[0].SetEmpty();
    }
    // Add our coinbase tx as first transaction
    pblock->vtx.push_back(txNew);
    // Largest block you're willing to create:
    unsigned int nBlockMaxSize = GetArg("-blockmaxsize", MAX_BLOCK_SIZE_GEN/2);
    // Limit to betweeen 1K and MAX_BLOCK_SIZE-1K for sanity:
    nBlockMaxSize = std::max((unsigned int)1000, std::min((unsigned int)(MAX_BLOCK_SIZE-1000), nBlockMaxSize));
    // How much of the block should be dedicated to high-priority transactions,
    // included regardless of the fees they pay
    unsigned int nBlockPrioritySize = GetArg("-blockprioritysize", 27000);
    nBlockPrioritySize = std::min(nBlockMaxSize, nBlockPrioritySize);
    // Minimum block size you want to create; block will be filled with free transactions
    // until there are no more or the block reaches this size:
    unsigned int nBlockMinSize = GetArg("-blockminsize", 0);
    nBlockMinSize = std::min(nBlockMaxSize, nBlockMinSize);
    // Fee-per-kilobyte amount considered the same as "free"
    // Be careful setting this: if you set it to zero then
    // a transaction spammer can cheaply fill blocks using
    // 1-satoshi-fee transactions. It should be set above the real
    // cost to you of processing a transaction.
    int64_t nMinTxFee = MIN_TX_FEE;
    if (mapArgs.count("-mintxfee"))
        ParseMoney(mapArgs["-mintxfee"], nMinTxFee);
    pblock->nBits = GetNextTargetRequired(pindexPrev, fProofOfStake);
    // Collect memory pool transactions into the block
    int64_t nFees = 0;
    {
        LOCK2(cs_main, mempool.cs);
        CTxDB txdb("r");
        // Priority order to process transactions
        list<COrphan> vOrphan; // list memory doesn't move
        map<uint256, vector<COrphan*> > mapDependers;
        // This vector will be sorted into a priority queue:
        vector<TxPriority> vecPriority;
        vecPriority.reserve(mempool.mapTx.size());
        // First pass: compute priority/fee for every eligible mempool tx and
        // park transactions with unconfirmed parents in the orphan list.
        for (map<uint256, CTransaction>::iterator mi = mempool.mapTx.begin(); mi != mempool.mapTx.end(); ++mi)
        {
            CTransaction& tx = (*mi).second;
            if (tx.IsCoinBase() || tx.IsCoinStake() || !IsFinalTx(tx, pindexPrev->nHeight + 1))
                continue;
            COrphan* porphan = NULL;
            double dPriority = 0;
            int64_t nTotalIn = 0;
            bool fMissingInputs = false;
            BOOST_FOREACH(const CTxIn& txin, tx.vin)
            {
                // Read prev transaction
                CTransaction txPrev;
                CTxIndex txindex;
                if (!txPrev.ReadFromDisk(txdb, txin.prevout, txindex))
                {
                    // This should never happen; all transactions in the memory
                    // pool should connect to either transactions in the chain
                    // or other transactions in the memory pool.
                    if (!mempool.mapTx.count(txin.prevout.hash))
                    {
                        printf("ERROR: mempool transaction missing input\n");
                        if (fDebug) assert("mempool transaction missing input" == 0);
                        fMissingInputs = true;
                        if (porphan)
                            vOrphan.pop_back();
                        break;
                    }
                    // Has to wait for dependencies
                    if (!porphan)
                    {
                        // Use list for automatic deletion
                        vOrphan.push_back(COrphan(&tx));
                        porphan = &vOrphan.back();
                    }
                    mapDependers[txin.prevout.hash].push_back(porphan);
                    porphan->setDependsOn.insert(txin.prevout.hash);
                    nTotalIn += mempool.mapTx[txin.prevout.hash].vout[txin.prevout.n].nValue;
                    continue;
                }
                int64_t nValueIn = txPrev.vout[txin.prevout.n].nValue;
                nTotalIn += nValueIn;
                int nConf = txindex.GetDepthInMainChain();
                dPriority += (double)nValueIn * nConf;
            }
            if (fMissingInputs) continue;
            // Priority is sum(valuein * age) / txsize
            unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
            dPriority /= nTxSize;
            // This is a more accurate fee-per-kilobyte than is used by the client code, because the
            // client code rounds up the size to the nearest 1K. That's good, because it gives an
            // incentive to create smaller transactions.
            double dFeePerKb =  double(nTotalIn-tx.GetValueOut()) / (double(nTxSize)/1000.0);
            if (porphan)
            {
                porphan->dPriority = dPriority;
                porphan->dFeePerKb = dFeePerKb;
            }
            else
                vecPriority.push_back(TxPriority(dPriority, dFeePerKb, &(*mi).second));
        }
        // Collect transactions into block
        map<uint256, CTxIndex> mapTestPool;
        uint64_t nBlockSize = 1000;
        uint64_t nBlockTx = 0;
        int nBlockSigOps = 100;
        bool fSortedByFee = (nBlockPrioritySize <= 0);
        TxPriorityCompare comparer(fSortedByFee);
        std::make_heap(vecPriority.begin(), vecPriority.end(), comparer);
        // Second pass: pop the best candidate off the heap, validate it
        // against the staged view, and append it until limits are reached.
        while (!vecPriority.empty())
        {
            // Take highest priority transaction off the priority queue:
            double dPriority = vecPriority.front().get<0>();
            double dFeePerKb = vecPriority.front().get<1>();
            CTransaction& tx = *(vecPriority.front().get<2>());
            std::pop_heap(vecPriority.begin(), vecPriority.end(), comparer);
            vecPriority.pop_back();
            // Size limits
            unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
            if (nBlockSize + nTxSize >= nBlockMaxSize)
                continue;
            // Legacy limits on sigOps:
            unsigned int nTxSigOps = tx.GetLegacySigOpCount();
            if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
                continue;
            // Timestamp limit
            if (tx.nTime > GetAdjustedTime() || (fProofOfStake && tx.nTime > pblock->vtx[0].nTime))
                continue;
            // Transaction fee
            int64_t nMinFee = tx.GetMinFee(nBlockSize, GMF_BLOCK);
            // Skip free transactions if we're past the minimum block size:
            if (fSortedByFee && (dFeePerKb < nMinTxFee) && (nBlockSize + nTxSize >= nBlockMinSize))
                continue;
            // Prioritize by fee once past the priority size or we run out of high-priority
            // transactions:
            if (!fSortedByFee &&
                ((nBlockSize + nTxSize >= nBlockPrioritySize) || (dPriority < COIN * 144 / 250)))
            {
                fSortedByFee = true;
                comparer = TxPriorityCompare(fSortedByFee);
                std::make_heap(vecPriority.begin(), vecPriority.end(), comparer);
            }
            // Connecting shouldn't fail due to dependency on other memory pool transactions
            // because we're already processing them in order of dependency
            map<uint256, CTxIndex> mapTestPoolTmp(mapTestPool);
            MapPrevTx mapInputs;
            bool fInvalid;
            if (!tx.FetchInputs(txdb, mapTestPoolTmp, false, true, mapInputs, fInvalid))
                continue;
            int64_t nTxFees = tx.GetValueIn(mapInputs)-tx.GetValueOut();
            if (nTxFees < nMinFee)
                continue;
            nTxSigOps += tx.GetP2SHSigOpCount(mapInputs);
            if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
                continue;
            if (!tx.ConnectInputs(txdb, mapInputs, mapTestPoolTmp, CDiskTxPos(1,1,1), pindexPrev, false, true))
                continue;
            mapTestPoolTmp[tx.GetHash()] = CTxIndex(CDiskTxPos(1,1,1), tx.vout.size());
            swap(mapTestPool, mapTestPoolTmp);
            // Added
            pblock->vtx.push_back(tx);
            nBlockSize += nTxSize;
            ++nBlockTx;
            nBlockSigOps += nTxSigOps;
            nFees += nTxFees;
            if (fDebug && GetBoolArg("-printpriority"))
            {
                printf("priority %.1f feeperkb %.1f txid %s\n",
                       dPriority, dFeePerKb, tx.GetHash().ToString().c_str());
            }
            // Add transactions that depend on this one to the priority queue
            uint256 hash = tx.GetHash();
            if (mapDependers.count(hash))
            {
                BOOST_FOREACH(COrphan* porphan, mapDependers[hash])
                {
                    if (!porphan->setDependsOn.empty())
                    {
                        porphan->setDependsOn.erase(hash);
                        if (porphan->setDependsOn.empty())
                        {
                            vecPriority.push_back(TxPriority(porphan->dPriority, porphan->dFeePerKb, porphan->ptx));
                            std::push_heap(vecPriority.begin(), vecPriority.end(), comparer);
                        }
                    }
                }
            }
        }
        nLastBlockTx = nBlockTx;
        nLastBlockSize = nBlockSize;
        if (fDebug && GetBoolArg("-printpriority"))
            printf("CreateNewBlock(): total size %"PRIu64"\n", nBlockSize);
        if (!fProofOfStake)
            pblock->vtx[0].vout[0].nValue = GetProofOfWorkReward(nFees);
        if (pFees)
            *pFees = nFees;
        // Fill in header
        pblock->hashPrevBlock  = pindexPrev->GetBlockHash();
        pblock->nTime          = max(pindexPrev->GetPastTimeLimit()+1, pblock->GetMaxTransactionTime());
        pblock->nTime          = max(pblock->GetBlockTime(), PastDrift(pindexPrev->GetBlockTime()));
        if (!fProofOfStake)
            pblock->UpdateTime(pindexPrev);
        pblock->nNonce         = 0;
    }
    return pblock.release();
}
// Bump the extra nonce embedded in the coinbase scriptSig so that repeated
// hashing attempts on the same template produce distinct merkle roots.
// The counter resets whenever work restarts on top of a new previous block.
void IncrementExtraNonce(CBlock* pblock, CBlockIndex* pindexPrev, unsigned int& nExtraNonce)
{
    // Update nExtraNonce
    static uint256 hashPrevBlock;
    if (hashPrevBlock != pblock->hashPrevBlock)
    {
        nExtraNonce = 0;
        hashPrevBlock = pblock->hashPrevBlock;
    }
    ++nExtraNonce;
    unsigned int nHeight = pindexPrev->nHeight+1; // Height first in coinbase required for block.version=2
    pblock->vtx[0].vin[0].scriptSig = (CScript() << nHeight << CBigNum(nExtraNonce)) + COINBASE_FLAGS;
    assert(pblock->vtx[0].vin[0].scriptSig.size() <= 100);
    // Changing the coinbase invalidates the cached merkle root; rebuild it.
    pblock->hashMerkleRoot = pblock->BuildMerkleTree();
}
// Serialize the block header into the padded, byte-swapped buffers expected
// by external getwork-style miners: pmidstate receives the SHA-256 midstate
// over the first 64 header bytes, pdata the 128-byte padded header, and
// phash1 a 64-byte padded scratch buffer for the second hash pass.
void FormatHashBuffers(CBlock* pblock, char* pmidstate, char* pdata, char* phash1)
{
    //
    // Pre-build hash buffers
    //
    // Layout mirrors the 80-byte header followed by SHA-256 padding; the
    // padding arrays are sized so FormatHashBlocks can write in place.
    struct
    {
        struct unnamed2
        {
            int nVersion;
            uint256 hashPrevBlock;
            uint256 hashMerkleRoot;
            unsigned int nTime;
            unsigned int nBits;
            unsigned int nNonce;
        }
        block;
        unsigned char pchPadding0[64];
        uint256 hash1;
        unsigned char pchPadding1[64];
    }
    tmp;
    memset(&tmp, 0, sizeof(tmp));
    tmp.block.nVersion       = pblock->nVersion;
    tmp.block.hashPrevBlock  = pblock->hashPrevBlock;
    tmp.block.hashMerkleRoot = pblock->hashMerkleRoot;
    tmp.block.nTime          = pblock->nTime;
    tmp.block.nBits          = pblock->nBits;
    tmp.block.nNonce         = pblock->nNonce;
    FormatHashBlocks(&tmp.block, sizeof(tmp.block));
    FormatHashBlocks(&tmp.hash1, sizeof(tmp.hash1));
    // Byte swap all the input buffer
    for (unsigned int i = 0; i < sizeof(tmp)/4; i++)
        ((unsigned int*)&tmp)[i] = ByteReverse(((unsigned int*)&tmp)[i]);
    // Precalc the first half of the first hash, which stays constant
    SHA256Transform(pmidstate, &tmp.block, pSHA256InitState);
    memcpy(pdata, &tmp.block, 128);
    memcpy(phash1, &tmp.hash1, 64);
}
// Validate and submit a freshly mined proof-of-work block: verify the hash
// meets the nBits target, check the block still builds on the current tip,
// permanently consume the reserved coinbase key, and hand the block to
// ProcessBlock as if it had arrived from the network.
bool CheckWork(CBlock* pblock, CWallet& wallet, CReserveKey& reservekey)
{
    uint256 hashBlock = pblock->GetHash();
    uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
    if(!pblock->IsProofOfWork())
        return error("CheckWork() : %s is not a proof-of-work block", hashBlock.GetHex().c_str());
    if (hashBlock > hashTarget)
        return error("CheckWork() : proof-of-work not meeting target");
    //// debug print
    printf("CheckWork() : new proof-of-work block found  \n  hash: %s  \ntarget: %s\n", hashBlock.GetHex().c_str(), hashTarget.GetHex().c_str());
    pblock->print();
    printf("generated %s\n", FormatMoney(pblock->vtx[0].vout[0].nValue).c_str());
    // Found a solution
    {
        LOCK(cs_main);
        // Another block may have arrived while we were hashing.
        if (pblock->hashPrevBlock != hashBestChain)
            return error("CheckWork() : generated block is stale");
        // Remove key from key pool
        reservekey.KeepKey();
        // Track how many getdata requests this block gets
        {
            LOCK(wallet.cs_wallet);
            wallet.mapRequestCount[hashBlock] = 0;
        }
        // Process this block the same as if we had received it from another node
        if (!ProcessBlock(NULL, pblock))
            return error("CheckWork() : ProcessBlock, block not accepted");
    }
    return true;
}
// Validate and submit a freshly signed proof-of-stake block: verify the
// coinstake (vtx[1]) satisfies the stake target and signature checks, make
// sure the block still builds on the current tip, then submit it through
// ProcessBlock as if received from a peer.
bool CheckStake(CBlock* pblock, CWallet& wallet)
{
    uint256 proofHash = 0, hashTarget = 0;
    uint256 hashBlock = pblock->GetHash();
    if(!pblock->IsProofOfStake())
        return error("CheckStake() : %s is not a proof-of-stake block", hashBlock.GetHex().c_str());
    // verify hash target and signature of coinstake tx
    if (!CheckProofOfStake(pblock->vtx[1], pblock->nBits, proofHash, hashTarget))
        return error("CheckStake() : proof-of-stake checking failed");
    //// debug print
    printf("CheckStake() : new proof-of-stake block found  \n  hash: %s \nproofhash: %s  \ntarget: %s\n", hashBlock.GetHex().c_str(), proofHash.GetHex().c_str(), hashTarget.GetHex().c_str());
    pblock->print();
    printf("out %s\n", FormatMoney(pblock->vtx[1].GetValueOut()).c_str());
    // Found a solution
    {
        LOCK(cs_main);
        // The chain tip may have moved while the block was being signed.
        if (pblock->hashPrevBlock != hashBestChain)
            return error("CheckStake() : generated block is stale");
        // Track how many getdata requests this block gets
        {
            LOCK(wallet.cs_wallet);
            wallet.mapRequestCount[hashBlock] = 0;
        }
        // Process this block the same as if we had received it from another node
        if (!ProcessBlock(NULL, pblock))
            return error("CheckStake() : ProcessBlock, block not accepted");
    }
    return true;
}
// Proof-of-stake minting loop, run on its own low-priority thread.
// Waits until the wallet is unlocked, peers are connected, and initial
// block download has finished, then repeatedly builds a candidate block
// and tries to sign it with a coinstake. Successful blocks are submitted
// via CheckStake; between attempts the thread sleeps nMinerSleep ms.
void StakeMiner(CWallet *pwallet)
{
    SetThreadPriority(THREAD_PRIORITY_LOWEST);
    // Make this thread recognisable as the mining thread
    RenameThread("EddieCoin-miner");
    bool fTryToSync = true;
    while (true)
    {
        if (fShutdown)
            return;
        // Cannot stake while the wallet is locked (keys unavailable).
        while (pwallet->IsLocked())
        {
            nLastCoinStakeSearchInterval = 0;
            MilliSleep(1000);
            if (fShutdown)
                return;
        }
        // Do not stake while offline or still syncing the chain.
        while (vNodes.empty() || IsInitialBlockDownload())
        {
            nLastCoinStakeSearchInterval = 0;
            fTryToSync = true;
            MilliSleep(1000);
            if (fShutdown)
                return;
        }
        if (fTryToSync)
        {
            fTryToSync = false;
            // Back off for a minute if peer count is low or peers report
            // a longer chain than ours — avoids staking on a stale tip.
            if (vNodes.size() < 3 || nBestHeight < GetNumBlocksOfPeers())
            {
                MilliSleep(60000);
                continue;
            }
        }
        //
        // Create new block
        //
        int64_t nFees;
        auto_ptr<CBlock> pblock(CreateNewBlock(pwallet, true, &nFees));
        if (!pblock.get())
            return;
        // Trying to sign a block
        if (pblock->SignBlock(*pwallet, nFees))
        {
            // Temporarily raise priority while validating/broadcasting.
            SetThreadPriority(THREAD_PRIORITY_NORMAL);
            CheckStake(pblock.get(), *pwallet);
            SetThreadPriority(THREAD_PRIORITY_LOWEST);
            MilliSleep(500);
        }
        else
            MilliSleep(nMinerSleep);
    }
}
| eddietributecoin/EddieCoin | src/miner.cpp | C++ | mit | 19,965 |
using System.Threading.Tasks;
using Lykke.Service.ExchangeConnector.Client.Models;
using MarginTrading.Backend.Core;
using MarginTrading.Contract.RabbitMqMessageModels;
namespace MarginTrading.Backend.Services.Notifications
{
    /// <summary>
    /// Publishes margin-trading domain events (account, order, trade and
    /// margin notifications) to RabbitMQ exchanges.
    /// </summary>
    public interface IRabbitMqNotifyService
    {
        // Account balance/transfer history entry.
        Task AccountHistory(string transactionId, string accountId, string clientId, decimal amount, decimal balance,
            decimal withdrawTransferLimit, AccountHistoryType type, decimal amountInUsd = default, string comment = null, 
            string eventSourceId = null, string legalEntity = null, string auditLog = null);
        // Order lifecycle notifications.
        Task OrderHistory(IOrder order, OrderUpdateType orderUpdateType);
        Task OrderReject(IOrder order);
        Task OrderBookPrice(InstrumentBidAskPair quote);
        Task OrderChanged(IOrder order);
        // Account state notifications.
        Task AccountUpdated(IMarginTradingAccount account);
        Task AccountStopout(string clientId, string accountId, int positionsCount, decimal totalPnl);
        Task UserUpdates(bool updateAccountAssets, bool updateAccounts, string[] clientIds);
        // Stops the underlying publishers.
        void Stop();
        Task AccountCreated(IMarginTradingAccount account);
        Task AccountDeleted(IMarginTradingAccount account);
        Task AccountMarginEvent(AccountMarginEventMessage eventMessage);
        Task UpdateAccountStats(AccountStatsUpdateMessage message);
        // Trade execution notifications (internal and external venues).
        Task NewTrade(TradeContract trade);
        Task ExternalOrder(ExecutionReport trade);
    }
} | LykkeCity/MT | src/MarginTrading.Backend.Services/Notifications/IRabbitMqNotifyService.cs | C# | mit | 1,359 |
require 'ims/lti'
# Serves the example LTI tool-provider guide pages and generates the XML
# tool configuration (cartridge) that an LTI consumer such as Canvas imports.
class GuideController < ApplicationController
  def home
  end

  # Renders the config-builder form; exposes the Canvas placement names the
  # form lets the user enable.
  def xml_builder
    @placements = CanvasExtensions::PLACEMENTS
  end

  # Builds and renders the LTI tool-config XML. Query parameters control the
  # Canvas extension settings, custom parameters and enabled placements.
  def xml_config
    tc = IMS::LTI::Services::ToolConfig.new(:title => "Example Tool Provider", :launch_url => blti_launch_url)
    tc.description = "This is a Sample Tool Provider."

    if query_params = request.query_parameters
      platform = CanvasExtensions::PLATFORM
      tc.set_ext_param(platform, :selection_width, query_params[:selection_width])
      tc.set_ext_param(platform, :selection_height, query_params[:selection_height])
      tc.set_ext_param(platform, :privacy_level, 'public')
      tc.set_ext_param(platform, :text, 'Extension text')
      tc.set_ext_param(platform, :icon_url, view_context.asset_url('selector.png'))
      tc.set_ext_param(platform, :domain, request.host_with_port)
      # Custom params arrive as a hash of {name:, value:} pairs.
      query_params[:custom_params].each { |_, v| tc.set_custom_param(v[:name].to_sym, v[:value]) } if query_params[:custom_params]
      query_params[:placements].each { |k, _| create_placement(tc, k.to_sym) } if query_params[:placements]
    end
    render xml: tc.to_xml(:indent => 2)
  end

  private

  # Adds one Canvas placement extension to the tool config, choosing the
  # launch URL and message type from the "<placement>_message_type" query
  # parameter (defaults to a basic LTI launch).
  def create_placement(tc, placement_key)
    message_type = request.query_parameters["#{placement_key}_message_type"] || :basic_lti_request
    navigation_params = case message_type
                        when 'content_item_selection'
                          {url: content_item_launch_url, message_type: 'ContentItemSelection'}
                        when 'content_item_selection_request'
                          {url: content_item_request_launch_url, message_type: 'ContentItemSelectionRequest'}
                        else
                          {url: blti_launch_url}
                        end

    navigation_params[:icon_url] = view_context.asset_url('selector.png') + "?#{placement_key}"
    navigation_params[:canvas_icon_class] = "icon-lti"
    navigation_params[:text] = "#{placement_key} Text"
    tc.set_ext_param(CanvasExtensions::PLATFORM, placement_key, navigation_params)
  end
end
| instructure/lti_tool_provider_example | app/controllers/guide_controller.rb | Ruby | mit | 2,081 |
require_relative '../../../spec_helper'
require_relative 'shared/constants'
require_relative '../../../core/file/shared/read'
# Specs for Digest::SHA512.file, which reads a file at the given path and
# returns its SHA-512 digest object.
describe "Digest::SHA512.file" do

  describe "when passed a path to a file that exists" do
    before :each do
      # Write the shared fixture contents to a temp file for each example.
      @file = tmp("md5_temp")
      touch(@file, 'wb') {|f| f.write SHA512Constants::Contents }
    end

    after :each do
      rm_r @file
    end

    it "returns a Digest::SHA512 object" do
      Digest::SHA512.file(@file).should be_kind_of(Digest::SHA512)
    end

    it "returns a Digest::SHA512 object with the correct digest" do
      Digest::SHA512.file(@file).digest.should == SHA512Constants::Digest
    end

    # Path-like objects are accepted via #to_str coercion.
    it "calls #to_str on an object and returns the Digest::SHA512 with the result" do
      obj = mock("to_str")
      obj.should_receive(:to_str).and_return(@file)
      result = Digest::SHA512.file(obj)
      result.should be_kind_of(Digest::SHA512)
      result.digest.should == SHA512Constants::Digest
    end
  end

  it_behaves_like :file_read_directory, :file, Digest::SHA512

  it "raises a Errno::ENOENT when passed a path that does not exist" do
    lambda { Digest::SHA512.file("") }.should raise_error(Errno::ENOENT)
  end

  it "raises a TypeError when passed nil" do
    lambda { Digest::SHA512.file(nil) }.should raise_error(TypeError)
  end
end
| ruby/rubyspec | library/digest/sha512/file_spec.rb | Ruby | mit | 1,321 |
using System;
using System.Threading.Tasks;
using Anotar.NLog;
using CroquetAustralia.Domain.Services.Serializers;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Queue;
namespace CroquetAustralia.Domain.Services.Queues
{
    /// <summary>
    /// Base class for Azure Storage queue publishers: lazily connects to the
    /// named queue (creating it if missing) and serializes events as JSON
    /// queue messages.
    /// </summary>
    public abstract class QueueBase : IQueueBase
    {
        // Lazy so the storage connection is only opened on first use.
        private readonly Lazy<CloudQueue> _lazyQueue;
        private readonly string _queueName;
        private readonly QueueMessageSerializer _serializer;

        protected QueueBase(string queueName, IAzureStorageConnectionString connectionString)
            : this(queueName, connectionString, new QueueMessageSerializer())
        {
        }

        protected QueueBase(string queueName, IAzureStorageConnectionString connectionString, QueueMessageSerializer serializer)
        {
            _queueName = queueName;
            _serializer = serializer;
            _lazyQueue = new Lazy<CloudQueue>(() => GetQueue(queueName, connectionString.Value));
        }

        private CloudQueue CloudQueue => _lazyQueue.Value;

        /// <summary>Serializes the event and enqueues it as one message.</summary>
        public async Task AddMessageAsync(object @event)
        {
            LogTo.Info($"Adding '{@event.GetType().FullName}' to '{_queueName}' queue.");

            var content = _serializer.Serialize(@event);
            var message = new CloudQueueMessage(content);

            await CloudQueue.AddMessageAsync(message);
        }

        // Resolves the queue reference and creates the queue on first access.
        private static CloudQueue GetQueue(string queueName, string connectionString)
        {
            var storageAccount = CloudStorageAccount.Parse(connectionString);
            var queueClient = storageAccount.CreateCloudQueueClient();
            var queue = queueClient.GetQueueReference(queueName);

            queue.CreateIfNotExists();

            return queue;
        }
    }
} | croquet-australia/api.croquet-australia.com.au | source/CroquetAustralia.Domain/Services/Queues/QueueBase.cs | C# | mit | 1,778 |
# Creates the phrases table: each row stores a small set of common travel
# phrases translated for one country.
class CreatePhrases < ActiveRecord::Migration
  def change
    create_table :phrases do |t|
      t.belongs_to :country

      # One string column per stock phrase we translate.
      [:hello, :please, :thanks, :bathroom].each do |phrase|
        t.string phrase
      end

      t.timestamps
    end
  end
end
| cicadas-2014/Elsewhere | db/migrate/20140626220711_create_phrases.rb | Ruby | mit | 250 |
const electron = require('electron');
const ipcRenderer = electron.ipcRenderer;
// Tell the main process the preview window is ready to receive the scene.
window.onload = function() {
  ipcRenderer.send('game-preview-loaded');
}

// Main process replies with the assets and scene file to preview; boot the
// engine's Application with them.
ipcRenderer.on('game-preview-start', function(event, data) {

  var app = new Application({
    // resize: true,
    fullscreen: true,
    antyAliasing: true,

    // Queue every asset for loading; scripts are require()'d directly.
    preload: function(){
      console.log(data.assets);
      //load images
      for (var i = 0; i < data.assets.length; i++) {
        var meta = data.assets[i];
        if(meta.type == 'script') {
          require(meta.path);
          // connect script with uuid
        } else {
          this.loader.load(meta.type, meta.path, meta.name, meta.uuid);
        }
      }
      //load scene (json)
      // this.loader.load('json', scene file path, 'scene.json');
    },

    //instantiate scene objects
    loaded: function(){
      //instantiate all object
      for(var i = 0; i < data.sceneFile.length; i++) {
        // Keep a handle on the object tagged as the main camera.
        if(data.sceneFile[i].tag == 'mainCamera') {
          this.mainCamera = this.scene.instantiate(data.sceneFile[i]);
        } else {
          this.scene.instantiate(data.sceneFile[i]);
        }
      }
    },

    //actual start function
    start: function(){
      //show must go on
    },

    preupdate: function(){
    },

    postupdate: function(){
    },

    // Kept as a reference for drawing an FPS overlay on the camera layer.
    postrender: function(){
      // var layer = this.mainCamera.camera.layer;
      // layer.ctx.save();
      // layer.textAlign('left');
      // layer.font('30px Arial');
      // layer.fillStyle('white');
      //
      // var fps = (Time.deltaTime).toFixed(3);
      //
      // layer.fillText(fps || 0, 0,30);
      // layer.ctx.restore();
    }
  });
})
| Baransu/Amble-Engine | src/game-preview/js/index.js | JavaScript | mit | 1,676 |
// Example bot settings consumed by the library.
module.exports = {
    // Telegram Bot API token — replace with the token issued by @BotFather.
    token: 'TELEGRAM_BOT_TOKEN',
    // Long-polling options (presumably forwarded to getUpdates — verify
    // against the library's polling implementation).
    polling: {
        timeout: 3,
        limit: 100
    }
};
| yurich/vow-telegram-bot | examples/settings.js | JavaScript | mit | 115 |
/* ColorComboBoxTest.cs --
* Ars Magna project, http://arsmagna.ru
* -------------------------------------------------------
* Status: poor
*/
#region Using directives
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using AM;
using AM.Windows.Forms;
using CodeJam;
using IrbisUI;
using JetBrains.Annotations;
using ManagedIrbis;
using MoonSharp.Interpreter;
using Newtonsoft.Json;
#endregion
namespace UITests
{
    /// <summary>
    /// Manual UI test: shows a <see cref="ColorComboBox"/> next to a text
    /// box that echoes the currently selected color.
    /// </summary>
    public sealed class ColorComboBoxTest
        : IUITest
    {
        #region IUITest members

        /// <summary>Opens the test dialog modally over the owner window.</summary>
        public void Run
            (
                IWin32Window ownerWindow
            )
        {
            using (Form form = new Form())
            {
                form.Size = new Size(800, 600);

                ColorComboBox colorBox = new ColorComboBox
                {
                    Location = new Point(10, 10),
                    Width = 200
                };
                form.Controls.Add(colorBox);

                TextBox textBox = new TextBox
                {
                    Location = new Point(310, 10),
                    Width = 300
                };
                form.Controls.Add(textBox);

                // Mirror each selection into the text box for inspection.
                colorBox.SelectedIndexChanged += (sender, args) =>
                {
                    textBox.Text = colorBox.SelectedColor.ToString();
                };

                form.ShowDialog(ownerWindow);
            }
        }

        #endregion
    }
| amironov73/ManagedIrbis | Source/UITests/Sources/Tests/ColorComboBoxTest.cs | C# | mit | 1,565 |
<?php
return array(
	// Framework defaults.
	'_root_'  => 'admin/index',  // The default route
	'_404_'   => 'welcome/404',    // The main 404 route
	// Admin pages; named routes expose reverse-routing helpers.
	'admin/detail/:id' => array('admin/detail/$1', 'name' => 'detail'),
	'admin/write' => '/admin/write/',
	// JSON API endpoints.
	'test'=> 'api/test',
	'blog'=> 'api/blog',
	'blog/insert'=> 'api/blog/insert',
	'blog/update'=> 'api/blog/update',
	'admin/list/'=> '/admin/list',
	'admin/edit/:id' => array('admin/edit/$1', 'name' => 'edit'),
	'admin/view/:id' => array('admin/view/$1', 'name' => 'view'),
	'test/insert'=> 'api/test/insert',
	// Optional :name segment, e.g. /hello and /hello/world both match.
	'hello(/:name)?' => array('welcome/hello', 'name' => 'hello'),
);
| yyoshinori/CastlePortal | fuel/app/config/routes.php | PHP | mit | 636 |
<!-- Modal: organization-position picker, filled via AJAX tree navigation -->
<div class="modal fade" id="explor_post" tabindex="-1" role="dialog" aria-labelledby="post_structlLabel">
    <div class="modal-dialog" role="document">
        <div class="modal-content">
            <div class="modal-header">
                <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
                <h4 class="modal-title" id="myModalLabel">Organization Position</h4>
            </div>
            <!-- Body is replaced by the AJAX-rendered position tree -->
            <div class="modal-body" id="post_exp">
            </div>
            <div class="modal-footer">
                <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
            </div>
        </div>
    </div>
</div>
<script>
	// Load the tree from the root node (id 1) each time the modal opens.
	$('#explor_post').on('show.bs.modal', function (e) {
		StructOrgPost(1);
	})
	// Drill down into a child node.
	$('#explor_post').on('click', '.nav-open', function(event) {
		event.preventDefault();
		var id = $(this).data('id');
		StructOrgPost(id);
	});
	// Copy the chosen position into the form's hidden id + visible text.
	$('#explor_post').on('click', '.nav-select', function(event) {
		event.preventDefault();
		var id = $(this).data('id');
		var text = $(this).data('text');
		$('#txt_post').val(text);
		$('#hdn_post').val(id);
	});

	// Fetch the org-structure fragment for node `id` and inject it into the
	// modal body.
	function StructOrgPost(id) {
		var ajaxUrl = siteUrl + '/Om/Ajax/ShowOrgStrucSelection' ;
		$.ajax({
			url: ajaxUrl,
			type: 'POST',
			dataType: 'html',
			data: {id: id,mode:'post'}
		})
		.done(function(respond) {
			$('#post_exp').html(respond);
		});
	}
</script>
| freon-lunarion/dew | application/modules/om/views/_element/orgPostStruct_modal.php | PHP | mit | 1,450 |
using System.Collections.ObjectModel;
using ActiproSoftware.Text;
using ActiproSoftware.Text.Utility;
using ActiproSoftware.Windows.Controls.SyntaxEditor;
using ActiproSoftware.Windows.Controls.SyntaxEditor.IntelliPrompt.Implementation;
using NQuery.Authoring.ActiproWpf.SymbolContent;
using NQuery.Authoring.ActiproWpf.Text;
using NQuery.Authoring.QuickInfo;
namespace NQuery.Authoring.ActiproWpf.QuickInfo
{
    /// <summary>
    /// Bridges NQuery quick-info models into Actipro SyntaxEditor's
    /// IntelliPrompt quick-info pipeline: resolves a model at the hovered
    /// offset and renders it via the symbol content provider.
    /// </summary>
    internal sealed class NQueryQuickInfoProvider : QuickInfoProviderBase, INQueryQuickInfoProvider
    {
        private readonly IServiceLocator _serviceLocator;

        public NQueryQuickInfoProvider(IServiceLocator serviceLocator)
        {
            _serviceLocator = serviceLocator;
        }

        private INQuerySymbolContentProvider SymbolContentProvider
        {
            get { return _serviceLocator.GetService<INQuerySymbolContentProvider>(); }
        }

        // Pluggable model providers consulted when resolving quick info.
        public Collection<IQuickInfoModelProvider> Providers { get; } = new();

        /// <summary>
        /// Returns the quick-info model at <paramref name="offset"/>, or null
        /// when no semantic model is available or nothing is found there.
        /// </summary>
        public override object GetContext(IEditorView view, int offset)
        {
            var documentView = view.SyntaxEditor.GetDocumentView();
            var document = documentView.Document;
            if (!document.TryGetSemanticModel(out var semanticModel))
                return null;

            var snapshot = document.Text.ToTextSnapshot();
            var snapshotOffset = new TextSnapshotOffset(snapshot, offset);
            var position = snapshotOffset.ToOffset();
            var model = semanticModel.GetQuickInfoModel(position, Providers);
            return model;
        }

        // Opens the quick-info popup over the model's text span.
        protected override bool RequestSession(IEditorView view, object context)
        {
            if (context is not QuickInfoModel model)
                return false;

            var text = model.SemanticModel.SyntaxTree.Text;
            var textSnapshotRange = text.ToSnapshotRange(model.Span);
            var textRange = textSnapshotRange.TextRange;
            var content = SymbolContentProvider.GetContentProvider(model.Glyph, model.Markup).GetContent();

            var quickInfoSession = new QuickInfoSession();
            quickInfoSession.Context = context;
            quickInfoSession.Content = content;
            quickInfoSession.Open(view, textRange);
            return true;
        }

        protected override IEnumerable<Type> ContextTypes
        {
            get { return new[] { typeof(QuickInfoModel) }; }
        }
    }
} | terrajobst/nquery-vnext | src/NQuery.Authoring.ActiproWpf/QuickInfo/NQueryQuickInfoProvider.cs | C# | mit | 2,436 |
#include <iostream>
using namespace std;
void display(const int *xPos, const int *yPos);
void move(int *xPos, int *yPos);
// Demo of pass-by-pointer: print a position, mutate it through move(),
// then print it again.
int main(void) {
	int x = 10;
	int y = 20;
	display(&x, &y);
	move(&x, &y);
	display(&x, &y);
	return 0;
}
// Print the current (x, y) position. Takes pointers-to-const because it
// only reads the values (const documents intent; not strictly required).
void display(const int *xPos, const int *yPos) {
	cout << "Current position [" << *xPos << ", " << *yPos << "]" << endl;
}
// Advance the position by one unit on each axis, writing the new values
// back through the caller's pointers.
void move(int *xPos, int *yPos) {
	++(*xPos);
	++(*yPos);
}
# A chat room. Users belong to a room and room activity is recorded in
# room logs.
class Room < ActiveRecord::Base
  # Every room must be given a name.
  validates :name, presence: true

  has_many :users
  has_many :room_logs
end
| ichylinux/chat | app/models/room.rb | Ruby | mit | 114 |
// Clear the "new post" notification counter when the friend-posts page opens.
Template.friendPosts.onCreated(function() {
	Bisia.Notification.resetNotify('note', 'post');
})

Template.friendPosts.helpers({
	// Look up a post and decorate it with its author's public profile
	// fields so the post template can render the header.
	getPost: function(postId) {
		var post = Posts.findOne(postId);
		if (post) {
			var user = Users.findOne({ '_id': post.authorId }, { 'fields': {
				'username': 1,
				'profile.city': 1,
				'profile.gender': 1,
				'profile.status': 1,
				'profile.avatar': 1,
				'profile.online': 1,
				'profile.birthday': 1
			}});
			post.showHeader = true;
			post.usern = user.username;
			post.profile = user.profile;
			return post;
		}
	},
	// Decide whether the paging spinner state should be reactive: only on
	// the first page does pageDisplay track the controller's pageReady flag.
	detectFirstPage: function() {
		var increment = Bisia.getController('increment');
		var limit = Bisia.getController('params')['pageLimit'];
		// Don't show spinner by default
		var pageDisplay = true;
		// If we are on the first page...
		if (!limit || limit == increment) {
			// pageDisplay becomes reactive
			pageDisplay = this.pageReady;
		}
		// Add pageDisplay to this
		return _.extend(this, {
			pageDisplay: pageDisplay
		});
	}
});

Template.friendPosts.events({
	// Reveal the bottom helper bar when the list is scrolled to the bottom.
	'scroll .content': function(e, t) {
		Bisia.Ui.toggleAtBottom(e, '#helpbars', 'bottom-show');
	}
});
import { all, takeEvery } from 'redux-saga/effects';
import actions from '#actions';
import handleShareFormChange from './startAlbumsSharingService/handleShareFormChange';
import handleShareFormSubmit from './startAlbumsSharingService/handleShareFormSubmit';
import handleShareItemsSelect from './startAlbumsSharingService/handleShareItemsSelect';
// Root saga for the album-sharing feature: routes share-related UI actions
// to their handler sagas. `apis` is passed straight through to each handler.
function* startAlbumsSharingService(apis) {
  yield all([
    takeEvery(actions.uiShareItemsSelected, handleShareItemsSelect, apis),
    takeEvery(actions.uiShareFormSubmited, handleShareFormSubmit, apis),
    takeEvery(actions.uiShareFormChanged, handleShareFormChange, apis),
  ]);
}

export default startAlbumsSharingService;
| pathephone/pathephone-desktop | src/renderer/sagas/startApp/startServices/startAlbumsSharingService.js | JavaScript | mit | 680 |
var fs = require('fs'),
eol = require('eol'),
path = require('path'),
mkdirp = require('mkdirp'),
watch = require('watch');
var specialFiles = {
'welcome.md': function(fileContent, consoleContent) {
consoleContent.welcome = processFileContent(fileContent);
},
'config.json': function(fileContent, consoleContent) {
var config = JSON.parse(fileContent);
consoleContent.executables = config.executables;
consoleContent.__config = config;
}
};
function processFileContent(fileContent) {
fileContent = eol.lf(fileContent);
fileContent = new Buffer(fileContent).toString('base64');
return fileContent;
}
function createContentFile(fileName, fileContent) {
var parsed = path.parse(fileName);
return {
content: processFileContent(fileContent),
base: fileName,
name: parsed.name,
ext: parsed.ext
};
}
function hasExecutableForFile(executables, fileName) {
if (!executables) {
return false;
}
for (var i in executables) {
if (executables[i].file === fileName) {
return true;
}
}
return false;
}
function setFilePermissions(files, config) {
if (!files) {
return;
}
for (var fileName in files) {
var file = files[fileName];
if (!config.noRead || config.noRead.indexOf(file.base) === -1) {
file.readable = true;
}
if (config.writeable && config.writeable.indexOf(file.base) !== -1) {
file.writeable = true;
}
if (hasExecutableForFile(config.executables, file.base)) {
file.executable = true;
}
}
}
/**
* This badass here reads all files from the console folders and creates the content.json file.
*/
function createConsoleContent() {
var srcPath = './src/content';
var targetPath = './dist/content';
var consoleFolders = fs.readdirSync(srcPath);
if (!consoleFolders) {
return;
}
consoleFolders.forEach(function(folderName) {
var consoleSrcPath = srcPath + '/' + folderName;
var consoleTargetPath = targetPath + '/' + folderName;
var stats = fs.statSync(consoleSrcPath);
if (!stats.isDirectory()) {
return;
}
var files = fs.readdirSync(consoleSrcPath);
if (!files || files.length === 0) {
console.log('No files found for ' + consoleSrcPath);
} else {
console.log('Processing content ' + folderName);
var consoleContent = {
files: {}
};
files.forEach(function(file) {
var fileContent = fs.readFileSync(consoleSrcPath + '/' + file, 'utf8');
if (specialFiles[file]) {
specialFiles[file](fileContent, consoleContent);
} else {
consoleContent.files[file] = createContentFile(file, fileContent);
}
});
if (consoleContent.__config) {
setFilePermissions(consoleContent.files, consoleContent.__config);
delete consoleContent.__config;
}
mkdirp.sync(consoleTargetPath);
fs.writeFileSync(consoleTargetPath + '/content.json', JSON.stringify(consoleContent), 'utf8');
}
});
}
if (process.argv.indexOf('--watching') !== -1) {
watch.watchTree('./src/content', function() {
createConsoleContent();
});
}
else {
createConsoleContent();
} | furti/mighty-quest-for-tux | parse-content.js | JavaScript | mit | 3,541 |
<?php
include_once('conexao.class.php');
class Disciplina{
public $id_disciplina;
public $nome;
public $professor;
public $curso;
public $carga_horaria;
public function __construct(){
//print "Disciplina instanciada!";
}
public function gravar(){
$sql = "insert into disciplina (nome, professor, curso, carga_horaria) values (?,?,?,?)";
$con = new Conexao();
$stm = $con->prepare($sql);
$stm->bindParam(1, $this->nome);
$stm->bindParam(2, $this->professor);
$stm->bindParam(3, $this->curso);
$stm->bindParam(4, $this->carga_horaria);
$stm->execute();
//echo "gravado";
}
public function __get($var){
return $this->$var;
}
public function __set($var, $valor){
$this->$var = $valor;
}
public function listar(){
$sql = "select * from disciplina";
$con = new Conexao();
$stm = $con->prepare($sql);
$stm->execute();
return $stm;
}
}
?> | STRVIRTU/tcc-2017 | less/disciplina.class.php | PHP | mit | 960 |
//
// MIT License
//
// Copyright 2019
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
#include "controller.hh"
#include "log.hh"
#include <boost/core/ignore_unused.hpp>
#include <boost/static_assert.hpp>
#include <map>
using namespace jones;
using button = jones::controller::button;
using button_state = jones::controller::button_state;
using button_state_map = std::map<button, button_state>;
namespace {
auto position_to_button(const uint8_t position) -> button {
switch (position) {
case 0:
return button::BUTTON_A;
case 1:
return button::BUTTON_B;
case 2:
return button::BUTTON_SELECT;
case 3:
return button::BUTTON_START;
case 4:
return button::BUTTON_UP;
case 5:
return button::BUTTON_DOWN;
case 6:
return button::BUTTON_LEFT;
case 7:
return button::BUTTON_RIGHT;
default:
BOOST_STATIC_ASSERT("unexpected button found");
return button::BUTTON_INVALID;
}
}
} // namespace
auto controller::button_to_string(const button button) -> auto {
switch (button) {
case button::BUTTON_A:
return "BUTTON_A";
case button::BUTTON_B:
return "BUTTON_B";
case button::BUTTON_SELECT:
return "BUTTON_SELECT";
case button::BUTTON_START:
return "BUTTON_START";
case button::BUTTON_UP:
return "BUTTON_UP";
case button::BUTTON_DOWN:
return "BUTTON_DOWN";
case button::BUTTON_LEFT:
return "BUTTON_LEFT";
case button::BUTTON_RIGHT:
return "BUTTON_RIGHT";
default:
return "BUTTON_INVALID";
}
}
auto controller::button_state_to_string(const button_state button_state) -> auto {
switch (button_state) {
case button_state::BUTTON_STATE_DOWN:
return "BUTTON_STATE_DOWN";
case button_state::BUTTON_STATE_UP:
return "BUTTON_STATE_UP";
default:
return "BUTTON_STATE_INVALID";
}
}
auto controller::controller_state_to_string(const controller_state controller_state) -> auto {
switch (controller_state) {
case controller_state::CONTROLLER_STATE_CONNECTED:
return "CONTROLLER_STATE_CONNECTED";
case controller_state::CONTROLLER_STATE_DISCONNECTED:
return "CONTROLLER_STATE_DISCONNECTED";
default:
return "CONTROLLER_STATE_INVALID";
}
}
class controller::controller::impl {
public:
explicit impl(const memory &memory)
: memory_(memory), strobe_(0), index_(0), button_states_(), controller_state_(controller_state::CONTROLLER_STATE_DISCONNECTED) {
boost::ignore_unused(memory_);
}
~impl() = default;
auto set_button_state(const button button, const button_state button_state) -> void {
button_states_[button] = button_state;
LOG_DEBUG << "controller::set_button_state : "
<< "button [" << button_to_string(button) << "] "
<< "button_state [" << button_state_to_string(button_state) << "]";
}
auto get_button_state(const button button) -> auto {
return button_states_[button];
}
auto set_controller_state(const controller_state controller_state) -> void {
controller_state_ = controller_state;
LOG_DEBUG << "controller::set_controller_state : "
<< "controller_state [" << controller_state_to_string(controller_state) << "]";
}
auto get_controller_state() -> auto {
return controller_state_;
}
auto peek(uint16_t const address) const -> uint8_t {
boost::ignore_unused(address);
uint8_t data = 0;
if (index_ < 8) {
auto const button_state = button_states_.find(position_to_button(index_));
if (button_state != button_states_.end() && button_state->second == button_state::BUTTON_STATE_DOWN) {
data = 1;
}
}
return data;
}
auto read(const uint16_t address) -> uint8_t {
auto const data = peek(address);
index_++;
update_button_index();
return data;
}
auto write(uint16_t const address, uint8_t const data) -> void {
boost::ignore_unused(address);
strobe_ = data;
update_button_index();
}
private:
auto update_button_index() -> void {
if ((strobe_ & 0x1U) == 1) {
index_ = 0;
}
}
private:
const memory &memory_;
uint8_t strobe_;
uint8_t index_;
button_state_map button_states_;
controller_state controller_state_;
};
controller::controller::controller(memory const &memory)
: impl_(std::make_unique<impl>(memory)) {
}
controller::controller::~controller() = default;
auto controller::controller::set_button_state(button const button, button_state const state) -> void {
impl_->set_button_state(button, state);
}
auto controller::controller::get_button_state(button const button) const -> button_state {
return impl_->get_button_state(button);
}
auto controller::controller::set_controller_state(controller_state const state) -> void {
impl_->set_controller_state(state);
}
auto controller::controller::get_controller_state() const -> controller_state {
return impl_->get_controller_state();
}
auto controller::controller::peek(uint16_t const address) const -> uint8_t {
return impl_->peek(address);
}
auto controller::controller::read(uint16_t const address) const -> uint8_t {
return impl_->read(address);
}
auto controller::controller::write(uint16_t const address, uint8_t const data) -> void {
impl_->write(address, data);
}
| thejunkjon/jones | source/jones/controller/controller.cc | C++ | mit | 6,251 |
export const Camera = `
<svg viewBox="0 0 28 28">
<g fill="none" fill-rule="evenodd">
<path d="M3 3h22a2 2 0 012 2v18a2 2 0 01-2 2H3a2 2 0 01-2-2V5a2 2 0 012-2z" stroke="currentColor"/>
<circle stroke="currentColor" cx="14" cy="14" r="5"/>
<path d="M22 7h1" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"/>
</g>
</svg>`;
| clair-design/clair | packages/icons/icons/Camera.ts | TypeScript | mit | 359 |
import datetime
import time
import boto
import redis
import requests
import random
import zlib
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.template.loader import render_to_string
from django.db import IntegrityError
from django.db.models import Q
from django.views.decorators.cache import never_cache
from django.core.urlresolvers import reverse
from django.contrib.auth import login as login_user
from django.contrib.auth import logout as logout_user
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404
from django.conf import settings
from django.core.mail import mail_admins
from django.core.validators import email_re
from django.core.mail import EmailMultiAlternatives
from django.contrib.sites.models import Site
from django.utils import feedgenerator
from mongoengine.queryset import OperationError
from mongoengine.queryset import NotUniqueError
from apps.recommendations.models import RecommendedFeed
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds
from apps.analyzer.models import apply_classifier_authors, apply_classifier_tags
from apps.analyzer.models import get_classifiers_for_user, sort_classifiers_by_feed
from apps.profile.models import Profile
from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory, Feature
from apps.reader.forms import SignupForm, LoginForm, FeatureForm
from apps.rss_feeds.models import MFeedIcon, MStarredStoryCounts
from apps.search.models import MUserSearch
from apps.statistics.models import MStatistics
# from apps.search.models import SearchStarredStory
try:
from apps.rss_feeds.models import Feed, MFeedPage, DuplicateFeed, MStory, MStarredStory
except:
pass
from apps.social.models import MSharedStory, MSocialProfile, MSocialServices
from apps.social.models import MSocialSubscription, MActivity, MInteraction
from apps.categories.models import MCategory
from apps.social.views import load_social_page
from apps.rss_feeds.tasks import ScheduleImmediateFetches
from utils import json_functions as json
from utils.user_functions import get_user, ajax_login_required
from utils.feed_functions import relative_timesince
from utils.story_functions import format_story_link_date__short
from utils.story_functions import format_story_link_date__long
from utils.story_functions import strip_tags
from utils import log as logging
from utils.view_functions import get_argument_or_404, render_to, is_true
from utils.view_functions import required_params
from utils.ratelimit import ratelimit
from vendor.timezones.utilities import localtime_for_timezone
# Feed domains that are blocked from being fetched/added.
# NOTE(review): the reason brentozar.com is listed isn't documented here —
# confirm before removing.
BANNED_URLS = [
    "brentozar.com",
]
@never_cache
@render_to('reader/dashboard.xhtml')
def index(request, **kwargs):
    """Front door of the site.

    A GET against a recognized user subdomain (anything other than
    dev/www/debug) serves that user's public social page.  Otherwise
    anonymous visitors get the welcome page and logged-in users get
    their dashboard.
    """
    subdomain = request.subdomain
    is_social_request = (request.method == "GET" and subdomain and
                         subdomain not in ['dev', 'www', 'debug'])
    if is_social_request:
        # Strip any trailing domain parts: "sam.localhost" -> "sam".
        handle = subdomain.split('.')[0] if '.' in subdomain else subdomain
        try:
            social_user = User.objects.get(username__iexact=handle)
        except User.DoesNotExist:
            # Unknown subdomain: bounce back to the canonical homepage.
            return HttpResponseRedirect('http://%s%s' % (
                Site.objects.get_current().domain,
                reverse('index')))
        return load_social_page(request, user_id=social_user.pk,
                                username=subdomain, **kwargs)

    if request.user.is_anonymous():
        return welcome(request, **kwargs)
    return dashboard(request, **kwargs)
def dashboard(request, **kwargs):
    """Render the logged-in homepage: subscription count, recommended
    feeds, site-wide statistics, and the user's social profile.

    Users whose accounts are not active are bounced to the payment form.
    Side effect: consumes the one-shot 'import_from_google_reader'
    session flag so the Google Reader import auto-starts only once.
    """
    user = request.user
    feed_count = UserSubscription.objects.filter(user=request.user).count()
    recommended = RecommendedFeed.objects.filter(
        is_public=True,
        approved_date__lte=datetime.datetime.now()).select_related('feed')[:2]
    unmoderated = []
    if user.is_staff:
        # Staff additionally see feeds awaiting moderation.
        unmoderated = RecommendedFeed.objects.filter(
            is_public=False,
            declined_date__isnull=True).select_related('feed')[:2]
    statistics = MStatistics.all()
    social_profile = MSocialProfile.get_user(user.pk)

    start_import_from_google_reader = request.session.get('import_from_google_reader', False)
    if start_import_from_google_reader:
        del request.session['import_from_google_reader']

    if not user.is_active:
        payment_url = "https://%s%s" % (Site.objects.get_current().domain,
                                        reverse('stripe-form'))
        return HttpResponseRedirect(payment_url)

    logging.user(request, "~FBLoading dashboard")

    context = {
        'user_profile'        : user.profile,
        'feed_count'          : feed_count,
        'account_images'      : range(1, 4),
        'recommended_feeds'   : recommended,
        'unmoderated_feeds'   : unmoderated,
        'statistics'          : statistics,
        'social_profile'      : social_profile,
        'start_import_from_google_reader': start_import_from_google_reader,
        'debug'               : settings.DEBUG,
    }
    return context, "reader/dashboard.xhtml"
def welcome(request, **kwargs):
    """Render the logged-out landing page with its login and signup forms.

    On a POST, only the form that was actually submitted (picked by the
    'submit' button value) is bound to the request data, so the other
    form doesn't show spurious validation errors.  A Django form
    constructed with data=None is unbound, same as passing no data.
    """
    user = get_user(request)
    statistics = MStatistics.all()
    social_profile = MSocialProfile.get_user(user.pk)

    posting = request.method == "POST"
    login_data = None
    signup_data = None
    if posting:
        if request.POST.get('submit', '').startswith('log'):
            login_data = request.POST
        else:
            signup_data = request.POST
    login_form = LoginForm(login_data, prefix='login')
    signup_form = SignupForm(signup_data, prefix='signup')

    logging.user(request, "~FBLoading welcome")

    return {
        'user_profile'   : hasattr(user, 'profile') and user.profile,
        'login_form'     : login_form,
        'signup_form'    : signup_form,
        'statistics'     : statistics,
        'social_profile' : social_profile,
        'post_request'   : posting,
    }, "reader/welcome.xhtml"
@never_cache
def login(request):
    """Handle a login POST.

    Browser logins redirect to the homepage on success and re-render the
    welcome page (with form errors) on failure.  API clients (signalled
    by the 'api' POST param) always get a JSON {code, message} payload
    instead: code 1 on success, -1 otherwise.
    """
    code = -1
    message = ""
    if request.method == "POST":
        form = LoginForm(request.POST, prefix='login')
        if not form.is_valid():
            # Surface only the first validation error to the client.
            message = form.errors.items()[0][1][0]
        else:
            user = form.get_user()
            login_user(request, user)
            if request.POST.get('api'):
                logging.user(user, "~FG~BB~SKiPhone Login~FW")
                code = 1
            else:
                logging.user(user, "~FG~BBLogin~FW")
                return HttpResponseRedirect(reverse('index'))

    if request.POST.get('api'):
        return HttpResponse(json.encode(dict(code=code, message=message)),
                            mimetype='application/json')
    return index(request)
@never_cache
def signup(request):
    """Create a new account from the signup form.

    Successful signups are logged in immediately; accounts that require
    payment (is_active False) are sent to the payment form.  Everything
    else — GET requests, invalid forms, active accounts — falls through
    to the homepage, which re-renders the form with its errors.
    """
    if request.method != "POST":
        return index(request)
    form = SignupForm(prefix='signup', data=request.POST)
    if not form.is_valid():
        return index(request)

    new_user = form.save()
    login_user(request, new_user)
    logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email)
    if not new_user.is_active:
        # Paid-only accounts must complete payment before use.
        payment_url = "https://%s%s" % (Site.objects.get_current().domain,
                                        reverse('stripe-form'))
        return HttpResponseRedirect(payment_url)
    return index(request)
@never_cache
def logout(request):
    """Log the user out: JSON for API clients ('api' GET param),
    homepage redirect for browsers."""
    logging.user(request, "~FG~BBLogout~FW")
    logout_user(request)
    if request.GET.get('api'):
        payload = json.encode(dict(code=1))
        return HttpResponse(payload, mimetype='application/json')
    return HttpResponseRedirect(reverse('index'))
def autologin(request, username, secret):
    """Log a user in via a secret-token link (e.g. from an email).

    Returns 403 unless both the username and its matching secret token
    check out.  After login, honors an optional ?next= parameter:
    relative paths are redirected to directly; anything else is handed
    to the homepage as a ?next= query argument.
    """
    next_url = request.GET.get('next', '')
    if not username or not secret:
        return HttpResponseForbidden()
    profiles = Profile.objects.filter(user__username=username, secret_token=secret)
    if not profiles:
        return HttpResponseForbidden()

    user = profiles[0].user
    # login() requires a backend annotation since authenticate() was skipped.
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    login_user(request, user)
    logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next_url if next_url else 'Homepage',))

    if not next_url:
        return HttpResponseRedirect(reverse('index'))
    if next_url.startswith('/'):
        return HttpResponseRedirect(next_url)
    return HttpResponseRedirect(reverse('index') + '?next=' + next_url)
@ratelimit(minutes=1, requests=24)
@never_cache
@json.json_view
def load_feeds(request):
    """Return the user's complete subscription universe for the web UI:
    feeds keyed by id (a list when ?v=2), nested folder structure,
    social subscriptions/profile/services, and starred-story counts.

    Delegates to load_feeds_flat() when ?flat= is set.  Side effects:
    recalculates stale unread counts (when update_counts) and schedules
    immediate fetches for feeds that look stale or inactive.
    """
    user = get_user(request)
    feeds = {}
    include_favicons = request.REQUEST.get('include_favicons', False)
    flat = request.REQUEST.get('flat', False)
    update_counts = request.REQUEST.get('update_counts', False)
    version = int(request.REQUEST.get('v', 1))
    # Query params arrive as strings, so 'false' must be mapped to False
    # explicitly (any non-empty string is truthy).
    if include_favicons == 'false': include_favicons = False
    if update_counts == 'false': update_counts = False
    if flat == 'false': flat = False
    if flat: return load_feeds_flat(request)
    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        # User has no folder record at all: empty account.
        data = dict(feeds=[], folders=[])
        return data
    except UserSubscriptionFolders.MultipleObjectsReturned:
        # Data corruption: keep the first record, drop the duplicates.
        UserSubscriptionFolders.objects.filter(user=user)[1:].delete()
        folders = UserSubscriptionFolders.objects.get(user=user)
    user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
    day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
    scheduled_feeds = []
    for sub in user_subs:
        pk = sub.feed_id
        if update_counts and sub.needs_unread_recalc:
            sub.calculate_feed_scores(silent=True)
        feeds[pk] = sub.canonical(include_favicon=include_favicons)
        # Collect feeds that should be re-fetched right away: inactive
        # without a recorded exception, orphaned, or overdue for update.
        if not sub.active: continue
        if not sub.feed.active and not sub.feed.has_feed_exception:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.active_subscribers <= 0:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.next_scheduled_update < day_ago:
            scheduled_feeds.append(sub.feed.pk)
    if len(scheduled_feeds) > 0 and request.user.is_authenticated():
        logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
                     len(scheduled_feeds))
        ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))
    starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
    if not starred_count and len(starred_counts):
        # Per-tag counts exist but the cached total is missing — recount.
        starred_count = MStarredStory.objects(user_id=user.pk).count()
    social_params = {
        'user_id': user.pk,
        'include_favicon': include_favicons,
        'update_counts': update_counts,
    }
    social_feeds = MSocialSubscription.feeds(**social_params)
    social_profile = MSocialProfile.profile(user.pk)
    social_services = MSocialServices.profile(user.pk)
    categories = None
    if not user_subs:
        # Brand-new users get the category browser instead of feeds.
        categories = MCategory.serialize()
    logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % (
        len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
    data = {
        'feeds': feeds.values() if version == 2 else feeds,
        'social_feeds': social_feeds,
        'social_profile': social_profile,
        'social_services': social_services,
        'user_profile': user.profile,
        "is_staff": user.is_staff,
        'folders': json.decode(folders.folders),
        'starred_count': starred_count,
        'starred_counts': starred_counts,
        'categories': categories
    }
    return data
@json.json_view
def load_feed_favicons(request):
    """Return {feed_id: favicon data} for the requested feed_ids, or for
    all of the user's active subscriptions when none are given."""
    user = get_user(request)
    feed_ids = request.REQUEST.getlist('feed_ids')
    if not feed_ids:
        # Default to every active subscription's feed.
        subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
        feed_ids = [values['feed__pk'] for values in subs.values('feed__pk')]

    icons = {}
    for icon in MFeedIcon.objects(feed_id__in=feed_ids):
        icons[icon.feed_id] = icon.data
    return icons
def load_feeds_flat(request):
    """Flat variant of load_feeds() used by the native mobile clients.

    Returns feeds keyed by id plus a flattened folder structure (rather
    than the nested one), social subscriptions/profile/services, and
    starred counts.  Requires an authenticated user (403 otherwise).

    Side effects: recalculates stale unread counts (when update_counts),
    auto-activates subscriptions for users whose feeds were all
    inactive, and schedules immediate fetches for stale feeds.

    Fix: removed the dead `== 'false'` string comparisons — both flags
    are already converted to booleans by is_true() above, so comparing
    them to the string 'false' could never be true.
    """
    user = request.user
    include_favicons = is_true(request.REQUEST.get('include_favicons', False))
    update_counts = is_true(request.REQUEST.get('update_counts', True))
    feeds = {}
    day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
    scheduled_feeds = []
    iphone_version = "2.1"

    if not user.is_authenticated():
        return HttpResponseForbidden()

    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        folders = []

    user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
    if not user_subs and folders:
        # The user has folders but zero active subs — reactivate them.
        folders.auto_activate()
        user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)

    for sub in user_subs:
        if update_counts and sub.needs_unread_recalc:
            sub.calculate_feed_scores(silent=True)
        feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons)
        # Collect feeds that should be re-fetched right away: inactive
        # without a recorded exception, orphaned, or overdue for update.
        if not sub.feed.active and not sub.feed.has_feed_exception:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.active_subscribers <= 0:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.next_scheduled_update < day_ago:
            scheduled_feeds.append(sub.feed.pk)

    if len(scheduled_feeds) > 0 and request.user.is_authenticated():
        logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
                     len(scheduled_feeds))
        ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))

    flat_folders = []
    if folders:
        flat_folders = folders.flatten_folders(feeds=feeds)

    social_params = {
        'user_id': user.pk,
        'include_favicon': include_favicons,
        'update_counts': update_counts,
    }
    social_feeds = MSocialSubscription.feeds(**social_params)
    social_profile = MSocialProfile.profile(user.pk)
    social_services = MSocialServices.profile(user.pk)
    starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
    if not starred_count and len(starred_counts):
        # Per-tag counts exist but the cached total is missing — recount.
        starred_count = MStarredStory.objects(user_id=user.pk).count()

    categories = None
    if not user_subs:
        # Brand-new users get the category browser instead of feeds.
        categories = MCategory.serialize()

    logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials ~FMflat~FB%s" % (
        len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))

    data = {
        "flat_folders": flat_folders,
        "feeds": feeds,
        "social_feeds": social_feeds,
        "social_profile": social_profile,
        "social_services": social_services,
        "user": user.username,
        "is_staff": user.is_staff,
        "user_profile": user.profile,
        "iphone_version": iphone_version,
        "categories": categories,
        'starred_count': starred_count,
        'starred_counts': starred_counts,
    }
    return data
@ratelimit(minutes=1, requests=10)
@never_cache
@json.json_view
def refresh_feeds(request):
    """Poll endpoint: return updated unread counts for the given feed_id
    params (social feeds are prefixed 'social:'), merge in any freshly
    fetched favicons, follow duplicate-feed redirects, and include the
    user's unread interactions count.

    With no feed_id params at all, counts for every subscription are
    refreshed.
    """
    user = get_user(request)
    feed_ids = request.REQUEST.getlist('feed_id')
    check_fetch_status = request.REQUEST.get('check_fetch_status')
    favicons_fetching = request.REQUEST.getlist('favicons_fetching')
    # Split social subscriptions ('social:<user_id>') from regular feeds.
    social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
    feed_ids = list(set(feed_ids) - set(social_feed_ids))
    feeds = {}
    # An empty request means "refresh everything" for both kinds.
    if feed_ids or (not social_feed_ids and not feed_ids):
        feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids,
                                                           check_fetch_status=check_fetch_status)
    social_feeds = {}
    if social_feed_ids or (not social_feed_ids and not feed_ids):
        social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)
    favicons_fetching = [int(f) for f in favicons_fetching if f]
    feed_icons = {}
    if favicons_fetching:
        feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])
    # Attach icon data for feeds the client said it was still waiting on.
    for feed_id, feed in feeds.items():
        if feed_id in favicons_fetching and feed_id in feed_icons:
            feeds[feed_id]['favicon'] = feed_icons[feed_id].data
            feeds[feed_id]['favicon_color'] = feed_icons[feed_id].color
            feeds[feed_id]['favicon_fetching'] = feed.get('favicon_fetching')
    user_subs = UserSubscription.objects.filter(user=user, active=True).only('feed')
    sub_feed_ids = [s.feed_id for s in user_subs]
    if favicons_fetching:
        # Requested feed ids the user isn't subscribed to may have been
        # merged into another feed; alias them under the old id.
        moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids]
        for moved_feed_id in moved_feed_ids:
            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id)
            if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds:
                feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id]
                feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id
    if check_fetch_status:
        # Tell the client the new id for any feed that was deduplicated.
        missing_feed_ids = list(set(feed_ids) - set(sub_feed_ids))
        if missing_feed_ids:
            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id__in=missing_feed_ids)
            for duplicate_feed in duplicate_feeds:
                feeds[duplicate_feed.duplicate_feed_id] = {'id': duplicate_feed.feed_id}
    interactions_count = MInteraction.user_unread_count(user.pk)
    # NOTE(review): 'True or ...' forces this log line unconditionally —
    # presumably a debugging leftover; confirm before removing.
    if True or settings.DEBUG or check_fetch_status:
        logging.user(request, "~FBRefreshing %s feeds (%s/%s)" % (
            len(feeds.keys()), check_fetch_status, len(favicons_fetching)))
    return {
        'feeds': feeds,
        'social_feeds': social_feeds,
        'interactions_count': interactions_count,
    }
@json.json_view
def interactions_count(request):
    """Return the number of unread interactions for the current user."""
    user = get_user(request)
    unread = MInteraction.user_unread_count(user.pk)
    return dict(interactions_count=unread)
@never_cache
@ajax_login_required
@json.json_view
def feed_unread_count(request):
    """Recalculate and return unread counts for the given feed_id params
    (social feeds are prefixed 'social:').  ?force=1 bypasses any
    freshness checks inside feeds_with_updated_counts.
    """
    user = request.user
    feed_ids = request.REQUEST.getlist('feed_id')
    force = request.REQUEST.get('force', False)
    # Split social subscriptions ('social:<user_id>') from regular feeds.
    social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
    feed_ids = list(set(feed_ids) - set(social_feed_ids))
    feeds = {}
    if feed_ids:
        feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, force=force)
    social_feeds = {}
    if social_feed_ids:
        social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)
    # Build a human-readable subject for the log line only.
    if len(feed_ids) == 1:
        if settings.DEBUG:
            feed_title = Feed.get_by_id(feed_ids[0]).feed_title
        else:
            feed_title = feed_ids[0]
    elif len(social_feed_ids) == 1:
        feed_title = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace('social:', '')).username
    else:
        feed_title = "%s feeds" % (len(feeds) + len(social_feeds))
    logging.user(request, "~FBUpdating unread count on: %s" % feed_title)
    return {'feeds': feeds, 'social_feeds': social_feeds}
def refresh_feed(request, feed_id):
    """Force-fetch a single feed, recalculate the user's unread scores
    for it, and return the freshly loaded stories via load_single_feed.

    NOTE(review): UserSubscription.objects.get() raises DoesNotExist
    (a 500) when the user isn't subscribed to this feed — presumably
    the URL is only reachable for subscribers; confirm.
    """
    user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)
    # compute_scores is deferred: calculate_feed_scores below does it
    # for this user with full logging.
    feed = feed.update(force=True, compute_scores=False)
    usersub = UserSubscription.objects.get(user=user, feed=feed)
    usersub.calculate_feed_scores(silent=False)
    logging.user(request, "~FBRefreshing feed: %s" % feed)
    return load_single_feed(request, feed_id)
@never_cache
@json.json_view
def load_single_feed(request, feed_id):
    """Load one page of stories for a single feed, annotated per-user.

    Story sources, in priority order: premium full-text search (?query=),
    the user's starred stories (?read_filter=starred), the subscription's
    unread/oldest-ordered stories, or the feed's plain story list.  Each
    story is then stamped with read/starred/shared state, formatted
    dates, and intelligence classifier scores.

    Side effects: bumps the subscription's open counter and flags it for
    unread recalculation; premium searches touch the user's search date.
    Non-subscribers get the feed's canonical metadata merged in.
    """
    start = time.time()
    user = get_user(request)
    # offset = int(request.REQUEST.get('offset', 0))
    # limit = int(request.REQUEST.get('limit', 6))
    limit = 6
    page = int(request.REQUEST.get('page', 1))
    offset = limit * (page-1)
    order = request.REQUEST.get('order', 'newest')
    read_filter = request.REQUEST.get('read_filter', 'all')
    query = request.REQUEST.get('query')
    include_story_content = is_true(request.REQUEST.get('include_story_content', True))
    include_hidden = is_true(request.REQUEST.get('include_hidden', False))
    message = None
    user_search = None
    dupe_feed_id = None
    user_profiles = []
    now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
    if not feed_id: raise Http404
    feed_address = request.REQUEST.get('feed_address')
    feed = Feed.get_by_id(feed_id, feed_address=feed_address)
    if not feed:
        raise Http404
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        # Not subscribed: stories are still served, but without
        # per-subscription read state.
        usersub = None
    # --- Pick the story source -------------------------------------------
    if query:
        if user.profile.is_premium:
            user_search = MUserSearch.get_user(user.pk)
            user_search.touch_search_date()
            stories = feed.find_stories(query, order=order, offset=offset, limit=limit)
        else:
            stories = []
            message = "You must be a premium subscriber to search."
    elif read_filter == 'starred':
        mstories = MStarredStory.objects(
            user_id=user.pk,
            story_feed_id=feed_id
        ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit]
        stories = Feed.format_stories(mstories)
    elif usersub and (read_filter == 'unread' or order == 'oldest'):
        stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit,
                                      default_cutoff_date=user.profile.unread_cutoff)
    else:
        stories = feed.get_stories(offset, limit)
    checkpoint1 = time.time()
    # Attach comments/share data; tolerate Redis being down.
    try:
        stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
    except redis.ConnectionError:
        logging.user(request, "~BR~FK~SBRedis is unavailable for shared stories.")
    checkpoint2 = time.time()
    # Get intelligence classifier for user
    if usersub and usersub.is_trained:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))
    else:
        classifier_feeds = []
        classifier_authors = []
        classifier_titles = []
        classifier_tags = []
    classifiers = get_classifiers_for_user(user, feed_id=feed_id,
                                           classifier_feeds=classifier_feeds,
                                           classifier_authors=classifier_authors,
                                           classifier_titles=classifier_titles,
                                           classifier_tags=classifier_tags)
    checkpoint3 = time.time()
    # --- Gather per-story read/starred/shared state ----------------------
    unread_story_hashes = []
    if stories:
        if (read_filter == 'all' or query) and usersub:
            unread_story_hashes = UserSubscription.story_hashes(user.pk, read_filter='unread',
                                  feed_ids=[usersub.feed_id],
                                  usersubs=[usersub],
                                  group_by_feed=False,
                                  cutoff_date=user.profile.unread_cutoff)
        story_hashes = [story['story_hash'] for story in stories if story['story_hash']]
        starred_stories = MStarredStory.objects(user_id=user.pk,
                                                story_feed_id=feed.pk,
                                                story_hash__in=story_hashes)\
                                       .only('story_hash', 'starred_date', 'user_tags')
        shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
        shared_stories = []
        if shared_story_hashes:
            shared_stories = MSharedStory.objects(user_id=user.pk,
                                                  story_hash__in=shared_story_hashes)\
                                         .only('story_hash', 'shared_date', 'comments')
        starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date,
                                                        user_tags=story.user_tags))
                                for story in starred_stories])
        shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
                                                       comments=story.comments))
                               for story in shared_stories])
    checkpoint4 = time.time()
    # --- Annotate each story ---------------------------------------------
    for story in stories:
        if not include_story_content:
            del story['story_content']
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        nowtz = localtime_for_timezone(now, user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        if usersub:
            story['read_status'] = 1
            if (read_filter == 'all' or query) and usersub:
                story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0
            elif read_filter == 'unread' and usersub:
                story['read_status'] = 0
            if story['story_hash'] in starred_stories:
                story['starred'] = True
                starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
                                                      user.profile.timezone)
                story['starred_date'] = format_story_link_date__long(starred_date, now)
                story['starred_timestamp'] = starred_date.strftime('%s')
                story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
            if story['story_hash'] in shared_stories:
                story['shared'] = True
                shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'],
                                                     user.profile.timezone)
                story['shared_date'] = format_story_link_date__long(shared_date, now)
                story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
        else:
            # Non-subscribers see everything as read.
            story['read_status'] = 1
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }
        story['score'] = UserSubscription.score_story(story['intelligence'])
    # Intelligence
    feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
    feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
    if usersub:
        usersub.feed_opens += 1
        usersub.needs_unread_recalc = True
        usersub.save(update_fields=['feed_opens', 'needs_unread_recalc'])
    # Timing breakdown for the log line (only shown when slow or DEBUG).
    diff1 = checkpoint1-start
    diff2 = checkpoint2-start
    diff3 = checkpoint3-start
    diff4 = checkpoint4-start
    timediff = time.time()-start
    last_update = relative_timesince(feed.last_update)
    time_breakdown = ""
    if timediff > 1 or settings.DEBUG:
        time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % (
            diff1, diff2, diff3, diff4)
    search_log = "~SN~FG(~SB%s~SN) " % query if query else ""
    logging.user(request, "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" % (
        feed.feed_title[:22], ('~SN/p%s' % page) if page > 1 else '', order, read_filter, search_log, time_breakdown))
    if not include_hidden:
        # Drop stories the classifiers scored negatively.
        hidden_stories_removed = 0
        new_stories = []
        for story in stories:
            if story['score'] >= 0:
                new_stories.append(story)
            else:
                hidden_stories_removed += 1
        stories = new_stories
    data = dict(stories=stories,
                user_profiles=user_profiles,
                feed_tags=feed_tags,
                feed_authors=feed_authors,
                classifiers=classifiers,
                updated=last_update,
                user_search=user_search,
                feed_id=feed.pk,
                elapsed_time=round(float(timediff), 2),
                message=message)
    if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed
    if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
    if not usersub:
        data.update(feed.canonical())
    # if not usersub and feed.num_subscribers <= 1:
    #     data = dict(code=-1, message="You must be subscribed to this feed.")
    # if page <= 3:
    #     import random
    #     time.sleep(random.randint(2, 4))
    # if page == 2:
    #     assert False
    return data
def load_feed_page(request, feed_id):
    """Serve the stored "original site" HTML page for a feed.

    Sources are tried in order: the node.js original-page server (when
    configured), S3 (either proxied through this process or via a
    client redirect), and finally the MFeedPage record in the database.
    Renders a 404 page when no copy is available.
    """
    if not feed_id:
        raise Http404
    feed = Feed.get_by_id(feed_id)
    if feed and feed.has_page and not feed.has_page_exception:
        if settings.BACKED_BY_AWS.get('pages_on_node'):
            # Proxy the page from the node original-page server.
            url = "http://%s/original_page/%s" % (
                settings.ORIGINAL_PAGE_SERVER,
                feed.pk,
            )
            page_response = requests.get(url)
            if page_response.status_code == 200:
                response = HttpResponse(page_response.content, mimetype="text/html; charset=utf-8")
                # Stored content is gzipped; forward the caching headers as-is.
                response['Content-Encoding'] = 'gzip'
                response['Last-Modified'] = page_response.headers.get('Last-modified')
                response['Etag'] = page_response.headers.get('Etag')
                response['Content-Length'] = str(len(page_response.content))
                logging.user(request, "~FYLoading original page, proxied from node: ~SB%s bytes" %
                             (len(page_response.content)))
                return response
        if settings.BACKED_BY_AWS['pages_on_s3'] and feed.s3_page:
            if settings.PROXY_S3_PAGES:
                # Fetch the gzipped page from S3 and serve it ourselves.
                key = settings.S3_PAGES_BUCKET.get_key(feed.s3_pages_key)
                if key:
                    compressed_data = key.get_contents_as_string()
                    response = HttpResponse(compressed_data, mimetype="text/html; charset=utf-8")
                    response['Content-Encoding'] = 'gzip'
                    logging.user(request, "~FYLoading original page, proxied: ~SB%s bytes" %
                                 (len(compressed_data)))
                    return response
            else:
                # Send the client straight to the S3-hosted copy.
                logging.user(request, "~FYLoading original page, non-proxied")
                return HttpResponseRedirect('//%s/%s' % (settings.S3_PAGES_BUCKET_NAME,
                                                         feed.s3_pages_key))
    # Fall back to the page stored in the database.
    data = MFeedPage.get_data(feed_id=feed_id)
    if not data or not feed or not feed.has_page or feed.has_page_exception:
        logging.user(request, "~FYLoading original page, ~FRmissing")
        return render(request, 'static/404_original_page.xhtml', {},
            content_type='text/html',
            status=404)
    logging.user(request, "~FYLoading original page, from the db")
    return HttpResponse(data, mimetype="text/html; charset=utf-8")
@json.json_view
def load_starred_stories(request):
    """Return a page of the user's saved (starred) stories.

    Stories can be narrowed by full-text `query` or by saved-story
    `tag` (both premium-only), or fetched directly by story hash via
    repeated `h` params (capped at 100). Results are annotated with
    share state, intelligence placeholders, and localized dates.
    """
    user   = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit  = int(request.REQUEST.get('limit', 10))
    page   = int(request.REQUEST.get('page', 0))
    query  = request.REQUEST.get('query')
    order  = request.REQUEST.get('order', 'newest')
    tag    = request.REQUEST.get('tag')
    story_hashes = request.REQUEST.getlist('h')[:100]
    version = int(request.REQUEST.get('v', 1))
    now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
    message = None
    order_by = '-' if order == "newest" else ""
    # `page` takes precedence over a raw `offset`.
    if page: offset = limit * (page - 1)
    if query:
        # results = SearchStarredStory.query(user.pk, query)
        # story_ids = [result.db_id for result in results]
        if user.profile.is_premium:
            stories = MStarredStory.find_stories(query, user.pk, tag=tag, offset=offset, limit=limit,
                                                 order=order)
        else:
            stories = []
            message = "You must be a premium subscriber to search."
    elif tag:
        if user.profile.is_premium:
            mstories = MStarredStory.objects(
                user_id=user.pk,
                user_tags__contains=tag
            ).order_by('%sstarred_date' % order_by)[offset:offset+limit]
            stories = Feed.format_stories(mstories)
        else:
            stories = []
            message = "You must be a premium subscriber to read saved stories by tag."
    elif story_hashes:
        mstories = MStarredStory.objects(
            user_id=user.pk,
            story_hash__in=story_hashes
        ).order_by('%sstarred_date' % order_by)[offset:offset+limit]
        stories = Feed.format_stories(mstories)
    else:
        mstories = MStarredStory.objects(
            user_id=user.pk
        ).order_by('%sstarred_date' % order_by)[offset:offset+limit]
        stories = Feed.format_stories(mstories)
    stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)
    story_hashes   = [story['story_hash'] for story in stories]
    story_feed_ids = list(set(s['story_feed_id'] for s in stories))
    # Include canonical feed data for feeds the user isn't subscribed to.
    usersub_ids    = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
    usersub_ids    = [us['feed__pk'] for us in usersub_ids]
    unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
    unsub_feeds    = Feed.objects.filter(pk__in=unsub_feed_ids)
    unsub_feeds    = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds)
    shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
    shared_stories = []
    if shared_story_hashes:
        shared_stories = MSharedStory.objects(user_id=user.pk,
                                              story_hash__in=shared_story_hashes)\
                                     .only('story_hash', 'shared_date', 'comments')
        shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
                                                       comments=story.comments))
                               for story in shared_stories])
    nowtz = localtime_for_timezone(now, user.profile.timezone)
    # Per-story annotations: localized dates, read/starred flags, share state.
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone)
        story['starred_date'] = format_story_link_date__long(starred_date, nowtz)
        story['starred_timestamp'] = starred_date.strftime('%s')
        story['read_status'] = 1
        story['starred'] = True
        # Saved stories carry a neutral-positive intelligence stub.
        story['intelligence'] = {
            'feed': 1,
            'author': 0,
            'tags': 0,
            'title': 0,
        }
        if story['story_hash'] in shared_stories:
            story['shared'] = True
            story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
    search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
    logging.user(request, "~FCLoading starred stories: ~SB%s stories %s" % (len(stories), search_log))
    return {
        "stories": stories,
        "user_profiles": user_profiles,
        'feeds': unsub_feeds.values() if version == 2 else unsub_feeds,
        "message": message,
    }
@json.json_view
def starred_story_hashes(request):
    """List every saved-story hash for the user, newest first; when
    include_timestamps is truthy each hash is paired with the epoch
    timestamp at which it was starred."""
    user = get_user(request)
    with_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
    saved_stories = MStarredStory.objects(
        user_id=user.pk
    ).only('story_hash', 'starred_date').order_by('-starred_date')
    if with_timestamps:
        story_hashes = [(story.story_hash, story.starred_date.strftime("%s"))
                        for story in saved_stories]
    else:
        story_hashes = [story.story_hash for story in saved_stories]
    logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" %
                 (len(story_hashes)))
    return dict(starred_story_hashes=story_hashes)
def starred_stories_rss_feed(request, user_id, secret_token, tag_slug):
    """Render an Atom feed of the 25 most recent stories a user saved
    under a tag (or all saved stories when the tag count has no tag).

    404s when the user or the tag slug's saved-story count record does
    not exist.  NOTE(review): `secret_token` is only interpolated into
    the feed's self URL here; whether it is validated happens upstream
    (URL routing/middleware) — confirm before relying on it.
    """
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        raise Http404
    try:
        tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug)
    except MStarredStoryCounts.MultipleObjectsReturned:
        # Duplicate count rows can exist; take the first match.
        tag_counts = MStarredStoryCounts.objects(user_id=user_id, slug=tag_slug).first()
    except MStarredStoryCounts.DoesNotExist:
        raise Http404
    data = {}
    data['title'] = "Saved Stories - %s" % tag_counts.tag
    data['link'] = "%s%s" % (
        settings.NEWSBLUR_URL,
        reverse('saved-stories-tag', kwargs=dict(tag_name=tag_slug)))
    data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username,
                                                                                    tag_counts.tag)
    data['lastBuildDate'] = datetime.datetime.utcnow()
    data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL
    data['docs'] = None
    data['author_name'] = user.username
    data['feed_url'] = "%s%s" % (
        settings.NEWSBLUR_URL,
        reverse('starred-stories-rss-feed',
                kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug)),
    )
    rss = feedgenerator.Atom1Feed(**data)
    if not tag_counts.tag:
        # Count record without a tag means "all saved stories".
        starred_stories = MStarredStory.objects(
            user_id=user.pk
        ).order_by('-starred_date').limit(25)
    else:
        starred_stories = MStarredStory.objects(
            user_id=user.pk,
            user_tags__contains=tag_counts.tag
        ).order_by('-starred_date').limit(25)
    for starred_story in starred_stories:
        story_data = {
            'title': starred_story.story_title,
            'link': starred_story.story_permalink,
            # Story content is stored zlib-compressed.
            'description': (starred_story.story_content_z and
                            zlib.decompress(starred_story.story_content_z)),
            'author_name': starred_story.story_author_name,
            'categories': starred_story.story_tags,
            'unique_id': starred_story.story_guid,
            'pubdate': starred_story.starred_date,
        }
        rss.add_item(**story_data)
    logging.user(request, "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" % (
        user.username,
        tag_counts.tag,
        tag_counts.count,
        request.META.get('HTTP_USER_AGENT', "")[:24]
    ))
    return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')
@json.json_view
def load_read_stories(request):
    """Return a page of stories the user has already read, newest first
    by default, annotated with share/star state and localized dates.
    Search within read stories is not implemented yet.
    """
    user   = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit  = int(request.REQUEST.get('limit', 10))
    page   = int(request.REQUEST.get('page', 0))
    order  = request.REQUEST.get('order', 'newest')
    query  = request.REQUEST.get('query')
    now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
    message = None
    if page: offset = limit * (page - 1)
    if query:
        stories = []
        message = "Not implemented yet."
        # if user.profile.is_premium:
        #     stories = MStarredStory.find_stories(query, user.pk, offset=offset, limit=limit)
        # else:
        #     stories = []
        #     message = "You must be a premium subscriber to search."
    else:
        # Read-story order lives in redis; re-sort the Mongo results to match.
        story_hashes = RUserStory.get_read_stories(user.pk, offset=offset, limit=limit, order=order)
        mstories = MStory.objects(story_hash__in=story_hashes)
        stories = Feed.format_stories(mstories)
        stories = sorted(stories, key=lambda story: story_hashes.index(story['story_hash']),
                         reverse=bool(order=="oldest"))
    stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)
    story_hashes   = [story['story_hash'] for story in stories]
    story_feed_ids = list(set(s['story_feed_id'] for s in stories))
    # Canonical data for feeds the user isn't subscribed to.
    usersub_ids    = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
    usersub_ids    = [us['feed__pk'] for us in usersub_ids]
    unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
    unsub_feeds    = Feed.objects.filter(pk__in=unsub_feed_ids)
    unsub_feeds    = [feed.canonical(include_favicon=False) for feed in unsub_feeds]
    shared_stories = MSharedStory.objects(user_id=user.pk,
                                          story_hash__in=story_hashes)\
                                 .only('story_hash', 'shared_date', 'comments')
    shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
                                                   comments=story.comments))
                           for story in shared_stories])
    starred_stories = MStarredStory.objects(user_id=user.pk,
                                            story_hash__in=story_hashes)\
                                   .only('story_hash', 'starred_date')
    starred_stories = dict([(story.story_hash, story.starred_date)
                            for story in starred_stories])
    nowtz = localtime_for_timezone(now, user.profile.timezone)
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        story['read_status'] = 1
        story['intelligence'] = {
            'feed': 1,
            'author': 0,
            'tags': 0,
            'title': 0,
        }
        if story['story_hash'] in starred_stories:
            story['starred'] = True
            starred_date = localtime_for_timezone(starred_stories[story['story_hash']],
                                                  user.profile.timezone)
            # NOTE(review): passes `now` here while other date formats use
            # `nowtz` — looks inconsistent; confirm which is intended.
            story['starred_date'] = format_story_link_date__long(starred_date, now)
            story['starred_timestamp'] = starred_date.strftime('%s')
        if story['story_hash'] in shared_stories:
            story['shared'] = True
            story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
    search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
    logging.user(request, "~FCLoading read stories: ~SB%s stories %s" % (len(stories), search_log))
    return {
        "stories": stories,
        "user_profiles": user_profiles,
        "feeds": unsub_feeds,
        "message": message,
    }
@json.json_view
def load_river_stories__redis(request):
    """Load a page of the "river of news": unread (or filtered) stories
    merged across many feeds, using redis-backed story hash sets.

    Inputs select one of four story sources: explicit story hashes
    (`h`), a premium-only text `query`, `read_filter=starred`, or the
    default per-subscription unread hash merge. Stories are annotated
    with classifier scores, star/share state, and localized dates;
    negatively-scored stories are stripped unless include_hidden is set.
    """
    limit             = 12
    start             = time.time()
    user              = get_user(request)
    message           = None
    feed_ids          = [int(feed_id) for feed_id in request.REQUEST.getlist('feeds') if feed_id]
    if not feed_ids:
        feed_ids      = [int(feed_id) for feed_id in request.REQUEST.getlist('f') if feed_id]
    story_hashes      = request.REQUEST.getlist('h')[:100]
    original_feed_ids = list(feed_ids)
    page              = int(request.REQUEST.get('page', 1))
    order             = request.REQUEST.get('order', 'newest')
    read_filter       = request.REQUEST.get('read_filter', 'unread')
    query             = request.REQUEST.get('query')
    include_hidden    = is_true(request.REQUEST.get('include_hidden', False))
    now               = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
    usersubs          = []
    code              = 1
    user_search       = None
    offset = (page-1) * limit
    limit = page * limit
    story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')
    if story_hashes:
        # Direct hash lookup: skip per-feed unread computation entirely.
        unread_feed_story_hashes = None
        read_filter = 'unread'
        mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
        stories = Feed.format_stories(mstories)
    elif query:
        if user.profile.is_premium:
            user_search = MUserSearch.get_user(user.pk)
            user_search.touch_search_date()
            usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                       read_filter='all')
            feed_ids = [sub.feed_id for sub in usersubs]
            stories = Feed.find_feed_stories(feed_ids, query, order=order, offset=offset, limit=limit)
            mstories = stories
            unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
                                                                     read_filter="unread", order=order,
                                                                     group_by_feed=False,
                                                                     cutoff_date=user.profile.unread_cutoff)
        else:
            stories = []
            mstories = []
            message = "You must be a premium subscriber to search."
    elif read_filter == 'starred':
        mstories = MStarredStory.objects(
            user_id=user.pk,
            story_feed_id__in=feed_ids
        ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit]
        stories = Feed.format_stories(mstories)
    else:
        # Default path: merge unread hashes across the user's subscriptions.
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                   read_filter=read_filter)
        all_feed_ids = [f for f in feed_ids]
        feed_ids = [sub.feed_id for sub in usersubs]
        if feed_ids:
            params = {
                "user_id": user.pk,
                "feed_ids": feed_ids,
                "all_feed_ids": all_feed_ids,
                "offset": offset,
                "limit": limit,
                "order": order,
                "read_filter": read_filter,
                "usersubs": usersubs,
                "cutoff_date": user.profile.unread_cutoff,
            }
            story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
        else:
            story_hashes = []
            unread_feed_story_hashes = []
        mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
        stories = Feed.format_stories(mstories)
    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
    stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
    if not usersubs:
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids,
                                                   read_filter=read_filter)
    trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
    found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))
    # Find starred stories
    if found_feed_ids:
        if read_filter == 'starred':
            starred_stories = mstories
        else:
            starred_stories = MStarredStory.objects(
                user_id=user.pk,
                story_feed_id__in=found_feed_ids
            ).only('story_hash', 'starred_date')
        starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date,
                                                        user_tags=story.user_tags))
                                for story in starred_stories])
    else:
        starred_stories = {}
    # Intelligence classifiers for all feeds involved
    if found_trained_feed_ids:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                        feed_id__in=found_trained_feed_ids,
                                                        social_user_id=0))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                            feed_id__in=found_trained_feed_ids))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                          feed_id__in=found_trained_feed_ids))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                      feed_id__in=found_trained_feed_ids))
    else:
        classifier_feeds = []
        classifier_authors = []
        classifier_titles = []
        classifier_tags = []
    classifiers = sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids,
                                           classifier_feeds=classifier_feeds,
                                           classifier_authors=classifier_authors,
                                           classifier_titles=classifier_titles,
                                           classifier_tags=classifier_tags)
    # Just need to format stories
    nowtz = localtime_for_timezone(now, user.profile.timezone)
    for story in stories:
        if read_filter == 'starred':
            story['read_status'] = 1
        else:
            story['read_status'] = 0
        if read_filter == 'all' or query:
            if (unread_feed_story_hashes is not None and
                story['story_hash'] not in unread_feed_story_hashes):
                story['read_status'] = 1
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        if story['story_hash'] in starred_stories:
            story['starred'] = True
            starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
                                                  user.profile.timezone)
            # NOTE(review): passes `now` while nearby formats use `nowtz` —
            # looks inconsistent; confirm which is intended.
            story['starred_date'] = format_story_link_date__long(starred_date, now)
            story['starred_timestamp'] = starred_date.strftime('%s')
            story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }
        story['score'] = UserSubscription.score_story(story['intelligence'])
    if not user.profile.is_premium:
        message = "The full River of News is a premium feature."
        code = 0
        # if page > 1:
        #     stories = []
        # else:
        #     stories = stories[:5]
    diff = time.time() - start
    timediff = round(float(diff), 2)
    logging.user(request, "~FYLoading ~FCriver stories~FY: ~SBp%s~SN (%s/%s "
                               "stories, ~SN%s/%s/%s feeds, %s/%s)" %
                               (page, len(stories), len(mstories), len(found_feed_ids),
                               len(feed_ids), len(original_feed_ids), order, read_filter))
    if not include_hidden:
        # Strip negatively-scored ("hidden") stories from the payload.
        hidden_stories_removed = 0
        new_stories = []
        for story in stories:
            if story['score'] >= 0:
                new_stories.append(story)
            else:
                hidden_stories_removed += 1
        stories = new_stories
    # if page <= 1:
    #     import random
    #     time.sleep(random.randint(0, 6))
    data = dict(code=code,
                message=message,
                stories=stories,
                classifiers=classifiers,
                elapsed_time=timediff,
                user_search=user_search,
                user_profiles=user_profiles)
    if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed
    return data
@json.json_view
def unread_story_hashes__old(request):
    """Legacy endpoint: map each feed id to up to 500 unread story
    hashes for the user (with read-date scores when include_timestamps
    is set). Superseded by unread_story_hashes(), which delegates to
    UserSubscription.story_hashes().
    """
    user = get_user(request)
    feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feed_id') if feed_id]
    include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
    usersubs = {}
    if not feed_ids:
        # No explicit feeds requested: use every active sub with unreads.
        usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
                                                   Q(unread_count_positive__gt=0),
                                                   user=user, active=True)
        feed_ids = [sub.feed_id for sub in usersubs]
    else:
        usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
                                                   Q(unread_count_positive__gt=0),
                                                   user=user, active=True, feed__in=feed_ids)
    unread_feed_story_hashes = {}
    story_hash_count = 0
    usersubs = dict((sub.feed_id, sub) for sub in usersubs)
    for feed_id in feed_ids:
        if feed_id in usersubs:
            us = usersubs[feed_id]
        else:
            continue
        if not us.unread_count_neutral and not us.unread_count_positive:
            continue
        unread_feed_story_hashes[feed_id] = us.get_stories(read_filter='unread', limit=500,
                                                           withscores=include_timestamps,
                                                           hashes_only=True,
                                                           default_cutoff_date=user.profile.unread_cutoff)
        story_hash_count += len(unread_feed_story_hashes[feed_id])
    # BUGFIX: story_hash_count is an int; the previous len(story_hash_count)
    # raised TypeError and 500'd every request that reached this log line.
    logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
                 (len(feed_ids), story_hash_count))
    return dict(unread_feed_story_hashes=unread_feed_story_hashes)
@json.json_view
def unread_story_hashes(request):
    """Return story hashes for the requested feeds (all subscribed feeds
    when none are given), honoring order and read_filter and the user's
    unread cutoff; timestamps are included when requested."""
    user = get_user(request)
    requested = request.REQUEST.getlist('feed_id')
    feed_ids = [int(feed_id) for feed_id in requested if feed_id]
    include_timestamps = is_true(request.REQUEST.get('include_timestamps', False))
    order = request.REQUEST.get('order', 'newest')
    read_filter = request.REQUEST.get('read_filter', 'unread')
    story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
                                                 order=order, read_filter=read_filter,
                                                 include_timestamps=include_timestamps,
                                                 cutoff_date=user.profile.unread_cutoff)
    logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
                 (len(feed_ids), len(story_hashes)))
    return dict(unread_feed_story_hashes=story_hashes)
@ajax_login_required
@json.json_view
def mark_all_as_read(request):
    """Mark every feed and social subscription as read. days=0 clears
    everything; otherwise only stories older than `days` days are
    marked read, flagging subs for an unread recount."""
    code = 1
    try:
        days = int(request.REQUEST.get('days', 0))
    except ValueError:
        return dict(code=-1, message="Days parameter must be an integer, not: %s" %
                    request.REQUEST.get('days'))
    read_date = datetime.datetime.utcnow() - datetime.timedelta(days=days)
    feed_subs = UserSubscription.objects.filter(user=request.user)
    social_subs = MSocialSubscription.objects.filter(user_id=request.user.pk)
    for sub in list(feed_subs) + list(social_subs):
        if days == 0:
            sub.mark_feed_read()
        elif sub.mark_read_date < read_date:
            sub.needs_unread_recalc = True
            sub.mark_read_date = read_date
            sub.save()
    logging.user(request, "~FMMarking all as read: ~SB%s days" % (days,))
    return dict(code=code)
@ajax_login_required
@json.json_view
def mark_story_as_read(request):
    """Mark one or more story ids as read within a single feed.

    Falls back to a duplicate-feed lookup when the given feed id has
    been merged into another feed. Publishes a `feed:<id>` pubsub
    notice so the user's other sessions refresh.
    """
    story_ids = request.REQUEST.getlist('story_id')
    try:
        feed_id = int(get_argument_or_404(request, 'feed_id'))
    except ValueError:
        return dict(code=-1, errors=["You must pass a valid feed_id: %s" %
                                     request.REQUEST.get('feed_id')])
    try:
        usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
    except Feed.DoesNotExist:
        # The feed may have been merged away; follow the duplicate record.
        duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
        if duplicate_feed:
            feed_id = duplicate_feed[0].feed_id
            try:
                usersub = UserSubscription.objects.get(user=request.user,
                                                       feed=duplicate_feed[0].feed)
            except (Feed.DoesNotExist):
                return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
        else:
            return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
    except UserSubscription.DoesNotExist:
        usersub = None
    if usersub:
        data = usersub.mark_story_ids_as_read(story_ids, request=request)
    else:
        data = dict(code=-1, errors=["User is not subscribed to this feed."])
    # Notify the user's other sessions regardless of subscription state.
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'feed:%s' % feed_id)
    return data
@ajax_login_required
@json.json_view
def mark_story_hashes_as_read(request):
    """Mark a batch of story hashes as read, flag affected feed and
    social subscriptions for an unread recount, and notify the user's
    other sessions over pubsub."""
    pubsub = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    story_hashes = request.REQUEST.getlist('story_hash')
    feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes)
    if friend_ids:
        # Blurblog subscriptions whose shared stories were just read.
        dirty_socialsubs = MSocialSubscription.objects.filter(
            user_id=request.user.pk,
            subscription_user_id__in=friend_ids)
        for socialsub in dirty_socialsubs:
            if not socialsub.needs_unread_recalc:
                socialsub.needs_unread_recalc = True
                socialsub.save()
                pubsub.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)
    # Also count on original subscription
    for feed_id in feed_ids:
        matching_subs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
        if not matching_subs:
            continue
        usersub = matching_subs[0]
        if not usersub.needs_unread_recalc:
            usersub.needs_unread_recalc = True
            usersub.save(update_fields=['needs_unread_recalc'])
        pubsub.publish(request.user.username, 'feed:%s' % feed_id)
    hash_count = len(story_hashes)
    logging.user(request, "~FYRead %s %s in feed/socialsubs: %s/%s" % (
                 hash_count, 'story' if hash_count == 1 else 'stories', feed_ids, friend_ids))
    return dict(code=1, story_hashes=story_hashes,
                feed_ids=feed_ids, friend_user_ids=friend_ids)
@ajax_login_required
@json.json_view
def mark_feed_stories_as_read(request):
    """Mark story ids as read across multiple feeds in one call.

    `feeds_stories` is a JSON object of feed_id -> [story_id, ...].
    Returns the last per-feed result on success; returns immediately
    with code -1 when a feed cannot be resolved (even via its
    duplicate-feed record).
    """
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    feeds_stories = request.REQUEST.get('feeds_stories', "{}")
    feeds_stories = json.decode(feeds_stories)
    data = {
        'code': -1,
        'message': 'Nothing was marked as read'
    }
    for feed_id, story_ids in feeds_stories.items():
        try:
            feed_id = int(feed_id)
        except ValueError:
            # Skip non-numeric feed ids (e.g. social feeds) silently.
            continue
        try:
            usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
            data = usersub.mark_story_ids_as_read(story_ids, request=request)
        except UserSubscription.DoesNotExist:
            return dict(code=-1, error="You are not subscribed to this feed_id: %d" % feed_id)
        except Feed.DoesNotExist:
            # Feed may have been merged; retry through the duplicate record.
            duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
            try:
                if not duplicate_feed: raise Feed.DoesNotExist
                usersub = UserSubscription.objects.get(user=request.user,
                                                       feed=duplicate_feed[0].feed)
                data = usersub.mark_story_ids_as_read(story_ids, request=request)
            except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
                return dict(code=-1, error="No feed exists for feed_id: %d" % feed_id)
        r.publish(request.user.username, 'feed:%s' % feed_id)
    return data
@ajax_login_required
@json.json_view
def mark_social_stories_as_read(request):
    """Mark stories as read across social (blurblog) subscriptions.

    `users_feeds_stories` is JSON of social_user_id -> {feed_id:
    [story_id, ...]}. Errors are accumulated rather than aborting the
    whole batch; each processed pair publishes feed and social pubsub
    notices.
    """
    code = 1
    errors = []
    data = {}
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    users_feeds_stories = request.REQUEST.get('users_feeds_stories', "{}")
    users_feeds_stories = json.decode(users_feeds_stories)
    for social_user_id, feeds in users_feeds_stories.items():
        for feed_id, story_ids in feeds.items():
            feed_id = int(feed_id)
            try:
                socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
                                                            subscription_user_id=social_user_id)
                data = socialsub.mark_story_ids_as_read(story_ids, feed_id, request=request)
            except OperationError, e:
                code = -1
                errors.append("Already read story: %s" % e)
            except MSocialSubscription.DoesNotExist:
                # Not subscribed to this blurblog: record the read anyway.
                MSocialSubscription.mark_unsub_story_ids_as_read(request.user.pk, social_user_id,
                                                                 story_ids, feed_id,
                                                                 request=request)
            except Feed.DoesNotExist:
                # Feed may have been merged; retry against the duplicate's feed.
                duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
                if duplicate_feed:
                    try:
                        socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
                                                                    subscription_user_id=social_user_id)
                        data = socialsub.mark_story_ids_as_read(story_ids, duplicate_feed[0].feed.pk, request=request)
                    except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
                        code = -1
                        errors.append("No feed exists for feed_id %d." % feed_id)
                else:
                    continue
            r.publish(request.user.username, 'feed:%s' % feed_id)
        r.publish(request.user.username, 'social:%s' % social_user_id)
    data.update(code=code, errors=errors)
    return data
@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_unread(request):
    """Mark a single story as unread within a feed.

    Inverts the read state of newer stories on the subscription, marks
    any sharing social subs dirty, clears the hash from redis, and
    publishes a feed pubsub notice. Fails if the story is too old to be
    unread (per RUserStory.story_can_be_marked_read_by_user).
    """
    story_id = request.REQUEST.get('story_id', None)
    feed_id = int(request.REQUEST.get('feed_id', 0))
    try:
        usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
        feed = usersub.feed
    except UserSubscription.DoesNotExist:
        usersub = None
        feed = Feed.get_by_id(feed_id)
    if usersub and not usersub.needs_unread_recalc:
        usersub.needs_unread_recalc = True
        usersub.save(update_fields=['needs_unread_recalc'])
    data = dict(code=0, payload=dict(story_id=story_id))
    story, found_original = MStory.find_story(feed_id, story_id)
    if not story:
        logging.user(request, "~FY~SBUnread~SN story in feed: %s (NOT FOUND)" % (feed))
        return dict(code=-1, message="Story not found.")
    if usersub:
        data = usersub.invert_read_stories_after_unread_story(story, request)
    # Reject unreads on stories older than the user's unread window.
    message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
    if message:
        data['code'] = -1
        data['message'] = message
        return data
    social_subs = MSocialSubscription.mark_dirty_sharing_story(user_id=request.user.pk,
                                                               story_feed_id=feed_id,
                                                               story_guid_hash=story.guid_hash)
    dirty_count = social_subs and social_subs.count()
    dirty_count = ("(%s social_subs)" % dirty_count) if dirty_count else ""
    RUserStory.mark_story_hash_unread(user_id=request.user.pk, story_hash=story.story_hash)
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'feed:%s' % feed_id)
    logging.user(request, "~FY~SBUnread~SN story in feed: %s %s" % (feed, dirty_count))
    return data
@ajax_login_required
@json.json_view
@required_params('story_hash')
def mark_story_hash_as_unread(request):
    """Mark a single story hash as unread.

    Resolves the story from its hash, checks it is still young enough
    to be unread, flags the feed subscription and any friends' social
    subscriptions for an unread recount, and publishes pubsub notices.
    """
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    story_hash = request.REQUEST.get('story_hash')
    feed_id, _ = MStory.split_story_hash(story_hash)
    story, _ = MStory.find_story(feed_id, story_hash)
    if not story:
        data = dict(code=-1, message="That story has been removed from the feed, no need to mark it unread.")
        return data
    # Reject unreads on stories older than the user's unread window.
    message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
    if message:
        data = dict(code=-1, message=message)
        return data
    # Also count on original subscription
    usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
    if usersubs:
        usersub = usersubs[0]
        if not usersub.needs_unread_recalc:
            usersub.needs_unread_recalc = True
            usersub.save(update_fields=['needs_unread_recalc'])
        data = usersub.invert_read_stories_after_unread_story(story, request)
        r.publish(request.user.username, 'feed:%s' % feed_id)
    feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user.pk, story_hash)
    if friend_ids:
        # Friends who shared this story get their social subs recounted too.
        socialsubs = MSocialSubscription.objects.filter(
            user_id=request.user.pk,
            subscription_user_id__in=friend_ids)
        for socialsub in socialsubs:
            if not socialsub.needs_unread_recalc:
                socialsub.needs_unread_recalc = True
                socialsub.save()
            r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)
    logging.user(request, "~FYUnread story in feed/socialsubs: %s/%s" % (feed_id, friend_ids))
    return dict(code=1, story_hash=story_hash, feed_id=feed_id, friend_user_ids=friend_ids)
@ajax_login_required
@json.json_view
def mark_feed_as_read(request):
    """Mark one or more feeds (or `social:<id>` blurblogs) fully read.

    With a cutoff_timestamp, only stories on one side of the cutoff are
    marked, per `direction` ('older' or 'newer'). Per-feed errors are
    accumulated; pubsub refresh notices are sent per feed (or one batch
    notice when marking multiple feeds).
    """
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    feed_ids = request.REQUEST.getlist('feed_id')
    cutoff_timestamp = int(request.REQUEST.get('cutoff_timestamp', 0))
    direction = request.REQUEST.get('direction', 'older')
    multiple = len(feed_ids) > 1
    code = 1
    errors = []
    cutoff_date = datetime.datetime.fromtimestamp(cutoff_timestamp) if cutoff_timestamp else None
    for feed_id in feed_ids:
        if 'social:' in feed_id:
            # Blurblog pseudo-feed: resolve the social subscription.
            user_id = int(feed_id.replace('social:', ''))
            try:
                sub = MSocialSubscription.objects.get(user_id=request.user.pk,
                                                      subscription_user_id=user_id)
            except MSocialSubscription.DoesNotExist:
                logging.user(request, "~FRCouldn't find socialsub: %s" % user_id)
                continue
            if not multiple:
                sub_user = User.objects.get(pk=sub.subscription_user_id)
                logging.user(request, "~FMMarking social feed as read: ~SB%s" % (sub_user.username,))
        else:
            try:
                feed = Feed.objects.get(id=feed_id)
                sub = UserSubscription.objects.get(feed=feed, user=request.user)
                if not multiple:
                    logging.user(request, "~FMMarking feed as read: ~SB%s" % (feed,))
            except (Feed.DoesNotExist, UserSubscription.DoesNotExist), e:
                errors.append("User not subscribed: %s" % e)
                continue
            except (ValueError), e:
                errors.append("Invalid feed_id: %s" % e)
                continue
        if not sub:
            errors.append("User not subscribed: %s" % feed_id)
            continue
        try:
            if direction == "older":
                marked_read = sub.mark_feed_read(cutoff_date=cutoff_date)
            else:
                marked_read = sub.mark_newer_stories_read(cutoff_date=cutoff_date)
            if marked_read and not multiple:
                r.publish(request.user.username, 'feed:%s' % feed_id)
        except IntegrityError, e:
            errors.append("Could not mark feed as read: %s" % e)
            code = -1
    if multiple:
        # One batched refresh notice instead of one per feed.
        logging.user(request, "~FMMarking ~SB%s~SN feeds as read" % len(feed_ids))
        r.publish(request.user.username, 'refresh:%s' % ','.join(feed_ids))
    if errors:
        logging.user(request, "~FMMarking read had errors: ~FR%s" % errors)
    return dict(code=code, errors=errors, cutoff_date=cutoff_date, direction=direction)
def _parse_user_info(user):
    """Build the `user_info` payload for a request's user; anonymous
    users are reported under the name 'Anonymous'."""
    authenticated = user.is_authenticated()
    username = user.username if authenticated else 'Anonymous'
    return {
        'user_info': {
            'is_anonymous': json.encode(user.is_anonymous()),
            'is_authenticated': json.encode(authenticated),
            'username': json.encode(username)
        }
    }
@ajax_login_required
@json.json_view
def add_url(request):
    """Subscribe the user to a site or feed URL, optionally creating a
    new folder for it first.

    Rejects empty URLs and publishers on the BANNED_URLS list. On
    success, publishes a reload notice and schedules the feed for
    search indexing.
    """
    code = 0
    url = request.POST['url']
    folder = request.POST.get('folder', '')
    new_folder = request.POST.get('new_folder')
    auto_active = is_true(request.POST.get('auto_active', 1))
    skip_fetch = is_true(request.POST.get('skip_fetch', False))
    feed = None
    if not url:
        code = -1
        message = 'Enter in the website address or the feed URL.'
    elif any([(banned_url in url) for banned_url in BANNED_URLS]):
        code = -1
        message = "The publisher of this website has banned NewsBlur."
    else:
        if new_folder:
            # Create the target folder under `folder`, then file the feed in it.
            usf, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
            usf.add_folder(folder, new_folder)
            folder = new_folder
        code, message, us = UserSubscription.add_subscription(user=request.user, feed_address=url,
                                                              folder=folder, auto_active=auto_active,
                                                              skip_fetch=skip_fetch)
        feed = us and us.feed
        if feed:
            r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
            r.publish(request.user.username, 'reload:%s' % feed.pk)
            MUserSearch.schedule_index_feeds_for_search(feed.pk, request.user.pk)
    return dict(code=code, message=message, feed=feed)
@ajax_login_required
@json.json_view
def add_folder(request):
    """Create a new folder inside ``parent_folder`` (top level when blank)."""
    folder = request.POST['folder']
    parent_folder = request.POST.get('parent_folder', '')
    logging.user(request, "~FRAdding Folder: ~SB%s (in %s)" % (folder, parent_folder))
    if not folder:
        # Reject empty folder names outright.
        return dict(code=-1, message="Gotta write in a folder name.", folders=None)
    usf, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
    usf.add_folder(parent_folder, folder)
    folders = json.decode(usf.folders)
    # Prompt every connected client of this user to reload its feed list.
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    return dict(code=1, message="", folders=folders)
@ajax_login_required
@json.json_view
def delete_feed(request):
    """Unsubscribe the user from a feed located in ``in_folder``."""
    feed_id = int(request.POST['feed_id'])
    folder_name = request.POST.get('in_folder', None)
    if not folder_name or folder_name == ' ':
        # A missing or single-space folder name means the top level.
        folder_name = ""
    usf = get_object_or_404(UserSubscriptionFolders, user=request.user)
    usf.delete_feed(feed_id, folder_name)
    feed = Feed.objects.filter(pk=feed_id)
    if feed:
        # Refresh subscriber counts now that this user is gone.
        feed[0].count_subscribers()
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    return dict(code=1, message="Removed %s from '%s'." % (feed, folder_name))
@ajax_login_required
@json.json_view
def delete_feed_by_url(request):
    """Unsubscribe from a feed identified by its URL rather than its id."""
    url = request.POST['url']
    in_folder = request.POST.get('in_folder', '')
    if in_folder == ' ':
        # A single-space folder name means the top level.
        in_folder = ""
    feed = Feed.get_feed_from_url(url, create=False)
    if not feed:
        return dict(code=-1, message="URL not found.")
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.delete_feed(feed.pk, in_folder)
    remaining = Feed.objects.filter(pk=feed.pk)
    if remaining:
        # Refresh subscriber counts now that this user is gone.
        remaining[0].count_subscribers()
    return dict(code=1, message="")
@ajax_login_required
@json.json_view
def delete_folder(request):
    """Delete a folder (and its feeds) after emailing the user an OPML backup."""
    folder_to_delete = request.POST.get('folder_name') or request.POST.get('folder_to_delete')
    in_folder = request.POST.get('in_folder', None)
    feed_ids_in_folder = [int(f) for f in request.REQUEST.getlist('feed_id') if f]
    # A whole-folder delete is destructive enough to warrant an automatic backup.
    request.user.profile.send_opml_export_email(reason="You have deleted an entire folder of feeds, so here's a backup just in case.")
    # Duplicate folder titles within the same parent are all deleted together;
    # that layout is unsupported, so this is considered acceptable.
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.delete_folder(folder_to_delete, in_folder, feed_ids_in_folder)
    updated_folders = json.decode(user_sub_folders.folders)
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    return dict(code=1, folders=updated_folders)
@required_params('feeds_by_folder')
@ajax_login_required
@json.json_view
def delete_feeds_by_folder(request):
    """Bulk-delete feeds, grouped per folder, after sending an OPML backup."""
    feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
    # A bulk delete is destructive enough to warrant an automatic backup.
    request.user.profile.send_opml_export_email(reason="You have deleted a number of feeds at once, so here's a backup just in case.")
    # Duplicate folder titles within one parent are all affected together;
    # that layout is unsupported, so this is considered acceptable.
    usf = get_object_or_404(UserSubscriptionFolders, user=request.user)
    usf.delete_feeds_by_folder(feeds_by_folder)
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    return dict(code=1, folders=json.decode(usf.folders))
@ajax_login_required
@json.json_view
def rename_feed(request):
    """Store a user-specific title for one of the user's subscriptions."""
    feed = get_object_or_404(Feed, pk=int(request.POST['feed_id']))
    subscription = UserSubscription.objects.get(user=request.user, feed=feed)
    new_title = request.POST['feed_title']
    logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (
        feed.feed_title, new_title))
    # The original title stays on the Feed; only this user's view changes.
    subscription.user_title = new_title
    subscription.save()
    return dict(code=1)
@ajax_login_required
@json.json_view
def rename_folder(request):
    """Rename a folder in place; both old and new names are required."""
    folder_to_rename = request.POST.get('folder_name') or request.POST.get('folder_to_rename')
    new_folder_name = request.POST['new_folder_name']
    in_folder = request.POST.get('in_folder', '')
    # Duplicate folder titles inside the same parent all get renamed at once;
    # that layout is unsupported, so this is considered acceptable.
    if not folder_to_rename or not new_folder_name:
        return dict(code=-1)
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.rename_folder(folder_to_rename, new_folder_name, in_folder)
    return dict(code=1)
@ajax_login_required
@json.json_view
def move_feed_to_folders(request):
    """Move a feed out of ``in_folders`` and into ``to_folders``."""
    feed_id = int(request.POST['feed_id'])
    in_folders = request.POST.getlist('in_folders', '')
    to_folders = request.POST.getlist('to_folders', '')
    usf = get_object_or_404(UserSubscriptionFolders, user=request.user)
    usf = usf.move_feed_to_folders(feed_id, in_folders=in_folders,
                                   to_folders=to_folders)
    # Prompt connected clients to refresh their feed lists.
    redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL).publish(
        request.user.username, 'reload:feeds')
    return dict(code=1, folders=json.decode(usf.folders))
@ajax_login_required
@json.json_view
def move_feed_to_folder(request):
    """Move a single feed from ``in_folder`` into ``to_folder``."""
    feed_id = int(request.POST['feed_id'])
    in_folder = request.POST.get('in_folder', '')
    to_folder = request.POST.get('to_folder', '')
    usf = get_object_or_404(UserSubscriptionFolders, user=request.user)
    usf = usf.move_feed_to_folder(feed_id, in_folder=in_folder,
                                  to_folder=to_folder)
    # Prompt connected clients to refresh their feed lists.
    redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL).publish(
        request.user.username, 'reload:feeds')
    return dict(code=1, folders=json.decode(usf.folders))
@ajax_login_required
@json.json_view
def move_folder_to_folder(request):
    """Re-parent an entire folder from ``in_folder`` into ``to_folder``."""
    folder_name = request.POST['folder_name']
    in_folder = request.POST.get('in_folder', '')
    to_folder = request.POST.get('to_folder', '')
    usf = get_object_or_404(UserSubscriptionFolders, user=request.user)
    usf = usf.move_folder_to_folder(folder_name, in_folder=in_folder, to_folder=to_folder)
    # Prompt connected clients to refresh their feed lists.
    redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL).publish(
        request.user.username, 'reload:feeds')
    return dict(code=1, folders=json.decode(usf.folders))
@required_params('feeds_by_folder', 'to_folder')
@ajax_login_required
@json.json_view
def move_feeds_by_folder_to_folder(request):
    """Move many feeds at once into ``to_folder`` (optionally newly created)."""
    feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
    to_folder = request.POST['to_folder']
    new_folder = request.POST.get('new_folder', None)
    # A bulk move is destructive enough to warrant an automatic OPML backup.
    request.user.profile.send_opml_export_email(reason="You have moved a number of feeds at once, so here's a backup just in case.")
    usf = get_object_or_404(UserSubscriptionFolders, user=request.user)
    if new_folder:
        # Create the destination folder first, then move into it.
        usf.add_folder(to_folder, new_folder)
        to_folder = new_folder
    usf = usf.move_feeds_by_folder_to_folder(feeds_by_folder, to_folder)
    # Prompt connected clients to refresh their feed lists.
    redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL).publish(
        request.user.username, 'reload:feeds')
    return dict(code=1, folders=json.decode(usf.folders))
@login_required
def add_feature(request):
    """Staff-only: record a new entry in the Features changelog."""
    if not request.user.is_staff:
        return HttpResponseForbidden()
    code = -1
    form = FeatureForm(request.POST)
    if form.is_valid():
        form.save()
        code = 1
        return HttpResponseRedirect(reverse('index'))
    # NOTE(review): on an invalid form this returns a plain dict from a view
    # that has no @json.json_view decorator -- presumably this path is never
    # hit in practice; confirm before relying on it.
    return dict(code=code)
@json.json_view
def load_features(request):
    """Return one page (3 items) of the features list for the dashboard."""
    user = get_user(request)
    page = max(int(request.REQUEST.get('page', 0)), 0)
    logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page+1))
    # Fetch one extra row so the client can tell whether another page exists.
    rows = Feature.objects.all()[page*3:(page+1)*3+1].values()
    return [{
        'description': row['description'],
        'date': localtime_for_timezone(row['date'], user.profile.timezone).strftime("%b %d, %Y")
    } for row in rows]
@ajax_login_required
@json.json_view
def save_feed_order(request):
    """Persist the client's drag-and-drop ordering of folders and feeds."""
    folders = request.POST.get('folders')
    if not folders:
        return {}
    # Validate that the payload parses as JSON before storing the raw string.
    folders_list = json.decode(folders)
    assert folders_list is not None
    logging.user(request, "~FBFeed re-ordering: ~SB%s folders/feeds" % (len(folders_list)))
    user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user)
    user_sub_folders.folders = folders
    user_sub_folders.save()
    return {}
@json.json_view
def feeds_trainer(request):
    """Return classifier data for the intelligence trainer.

    Without ``feed_id``: covers every active subscription that has stories
    this month and is not yet trained. With ``feed_id``: covers just that
    feed, unconditionally. Also marks the profile as having used the trainer.
    """
    classifiers = []
    feed_id = request.REQUEST.get('feed_id')
    user = get_user(request)
    usersubs = UserSubscription.objects.filter(user=user, active=True)
    if feed_id:
        feed = get_object_or_404(Feed, pk=feed_id)
        usersubs = usersubs.filter(feed=feed)
    usersubs = usersubs.select_related('feed').order_by('-feed__stories_last_month')
    for us in usersubs:
        # An explicit feed_id bypasses the "untrained and has stories" filter.
        if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id:
            classifier = dict()
            classifier['classifiers'] = get_classifiers_for_user(user, feed_id=us.feed.pk)
            classifier['feed_id'] = us.feed_id
            classifier['stories_last_month'] = us.feed.stories_last_month
            classifier['num_subscribers'] = us.feed.num_subscribers
            classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
            classifier['feed_authors'] = json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else []
            classifiers.append(classifier)
    # Opening the trainer counts as having trained intelligence.
    user.profile.has_trained_intelligence = True
    user.profile.save()
    logging.user(user, "~FGLoading Trainer: ~SB%s feeds" % (len(classifiers)))
    return classifiers
@ajax_login_required
@json.json_view
def save_feed_chooser(request):
    """Apply the feed chooser: activate approved feeds, deactivate the rest.

    Free accounts are capped at 64 active feeds; premium accounts are not.
    """
    is_premium = request.user.profile.is_premium
    approved_feeds = [int(feed_id) for feed_id in request.POST.getlist('approved_feeds') if feed_id]
    if not is_premium:
        approved_feeds = approved_feeds[:64]
    activated = 0
    usersubs = UserSubscription.objects.filter(user=request.user)
    for sub in usersubs:
        try:
            if sub.feed_id in approved_feeds:
                activated += 1
                if not sub.active:
                    sub.active = True
                    sub.save()
                    if sub.feed.active_subscribers <= 0:
                        # Feed regained its first active subscriber: refresh counts.
                        sub.feed.count_subscribers()
            elif sub.active:
                sub.active = False
                sub.save()
        except Feed.DoesNotExist:
            # Orphaned subscription pointing at a deleted feed; skip it.
            pass
    request.user.profile.queue_new_feeds()
    request.user.profile.refresh_stale_feeds(exclude_new=True)
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % (
        activated,
        usersubs.count()
    ))
    return {'activated': activated}
@ajax_login_required
def retrain_all_sites(request):
    """Clear the trained flag on every subscription, then reload the trainer."""
    subscriptions = UserSubscription.objects.filter(user=request.user)
    for subscription in subscriptions:
        subscription.is_trained = False
        subscription.save()
    return feeds_trainer(request)
@login_required
def activate_premium_account(request):
    """Activate all subscriptions after a premium upgrade, then flag the profile."""
    try:
        usersubs = UserSubscription.objects.select_related('feed').filter(user=request.user)
        for sub in usersubs:
            sub.active = True
            sub.save()
            if sub.feed.premium_subscribers <= 0:
                # Feed's first premium subscriber: refresh counts and fetch now.
                sub.feed.count_subscribers()
                sub.feed.schedule_feed_fetch_immediately()
    except Exception, e:
        # Best-effort: report the failure to admins instead of blocking activation.
        subject = "Premium activation failed"
        message = "%s -- %s\n\n%s" % (request.user, usersubs, e)
        mail_admins(subject, message, fail_silently=True)
    # The premium flag is set even if the activation loop partially failed.
    request.user.profile.is_premium = True
    request.user.profile.save()
    return HttpResponseRedirect(reverse('index'))
@login_required
def login_as(request):
    """Staff-only impersonation: sign in as the user named in ``?user=``."""
    if not request.user.is_staff:
        logging.user(request, "~SKNON-STAFF LOGGING IN AS ANOTHER USER!")
        # The assert halts non-staff access in normal runs; the Forbidden
        # response below is the fallback when Python runs with -O (asserts stripped).
        assert False
        return HttpResponseForbidden()
    username = request.GET['user']
    user = get_object_or_404(User, username__iexact=username)
    # Manually attach a backend so login() accepts the user without a password.
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    login_user(request, user)
    return HttpResponseRedirect(reverse('index'))
def iframe_buster(request):
    """Answer iframe-busting pings with an empty 204 No Content."""
    logging.user(request, "~FB~SBiFrame bust!")
    return HttpResponse(status=204)
@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_starred(request):
    # Legacy endpoint: identifies the story by feed_id + story_id.
    return _mark_story_as_starred(request)
@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_starred(request):
    # Newer endpoint: identifies the story by its hash alone.
    return _mark_story_as_starred(request)
def _mark_story_as_starred(request):
    """Shared implementation for both starring endpoints.

    Locates the story by hash or by feed_id + story_id, then either creates a
    new MStarredStory (a snapshot of the story plus user tags) or updates the
    tags on an existing one, keeping per-feed and per-tag counts in sync.
    """
    code = 1
    feed_id = int(request.REQUEST.get('feed_id', 0))
    story_id = request.REQUEST.get('story_id', None)
    story_hash = request.REQUEST.get('story_hash', None)
    user_tags = request.REQUEST.getlist('user_tags')
    message = ""
    if story_hash:
        story, _ = MStory.find_story(story_hash=story_hash)
        feed_id = story and story.story_feed_id
    else:
        story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id)
    if not story:
        return {'code': -1, 'message': "Could not find story to save."}
    # Snapshot the story document, dropping fields owned by the starred copy.
    story_db = dict([(k, v) for k, v in story._data.items()
                     if k is not None and v is not None])
    story_db.pop('user_id', None)
    story_db.pop('starred_date', None)
    story_db.pop('id', None)
    story_db.pop('user_tags', None)
    now = datetime.datetime.now()
    story_values = dict(starred_date=now, user_tags=user_tags, **story_db)
    params = dict(story_guid=story.story_guid, user_id=request.user.pk)
    starred_story = MStarredStory.objects(**params).limit(1)
    created = False
    removed_user_tags = []
    if not starred_story:
        # First time this user stars this story: create the snapshot.
        params.update(story_values)
        starred_story = MStarredStory.objects.create(**params)
        created = True
        MActivity.new_starred_story(user_id=request.user.pk,
                                    story_title=story.story_title,
                                    story_feed_id=feed_id,
                                    story_id=starred_story.story_guid)
        new_user_tags = user_tags
        MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=1)
    else:
        # Already starred: diff the old/new tag lists so counts can be adjusted.
        starred_story = starred_story[0]
        new_user_tags = list(set(user_tags) - set(starred_story.user_tags or []))
        removed_user_tags = list(set(starred_story.user_tags or []) - set(user_tags))
        starred_story.user_tags = user_tags
        starred_story.save()
    for tag in new_user_tags:
        MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=1)
    for tag in removed_user_tags:
        MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
    if random.random() < 0.01:
        # Occasionally rebuild the tag counts from scratch to heal any drift.
        MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
    MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
    starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True)
    if not starred_count and len(starred_counts):
        # Total row missing but per-tag rows exist: fall back to a direct count.
        starred_count = MStarredStory.objects(user_id=request.user.pk).count()
    if created:
        logging.user(request, "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags))
    else:
        logging.user(request, "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags))
    return {'code': code, 'message': message, 'starred_count': starred_count, 'starred_counts': starred_counts}
@required_params('story_id')
@ajax_login_required
@json.json_view
def mark_story_as_unstarred(request):
    # Legacy endpoint: identifies the starred story by its guid.
    return _mark_story_as_unstarred(request)
@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_unstarred(request):
    # Newer endpoint: identifies the starred story by its hash.
    return _mark_story_as_unstarred(request)
def _mark_story_as_unstarred(request):
    """Shared implementation for both unstarring endpoints.

    Finds the user's starred copy by guid or hash, records the removal, and
    decrements the per-feed and per-tag starred counts.
    """
    code = 1
    story_id = request.POST.get('story_id', None)
    story_hash = request.REQUEST.get('story_hash', None)
    starred_counts = None
    starred_story = None
    if story_id:
        starred_story = MStarredStory.objects(user_id=request.user.pk, story_guid=story_id)
    if not story_id or not starred_story:
        # Fall back to the hash (or treat the id as a hash) when guid lookup fails.
        starred_story = MStarredStory.objects(user_id=request.user.pk, story_hash=story_hash or story_id)
    if starred_story:
        starred_story = starred_story[0]
        logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story.story_title[:50]))
        user_tags = starred_story.user_tags
        feed_id = starred_story.story_feed_id
        MActivity.remove_starred_story(user_id=request.user.pk,
                                       story_feed_id=starred_story.story_feed_id,
                                       story_id=starred_story.story_guid)
        # Detach the copy by reassigning it to the sentinel user 0; if a copy
        # already exists there, delete this one instead.
        starred_story.user_id = 0
        try:
            starred_story.save()
        except NotUniqueError:
            starred_story.delete()
        MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=-1)
        for tag in user_tags:
            try:
                MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
            except MStarredStoryCounts.DoesNotExist:
                pass
        # MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
        MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
        starred_counts = MStarredStoryCounts.user_counts(request.user.pk)
    else:
        code = -1
    return {'code': code, 'starred_counts': starred_counts}
@ajax_login_required
@json.json_view
def send_story_email(request):
    """Email the full text of a story to one or more recipients.

    Validates the recipient list and the sender's name/address, renders both
    text and HTML bodies, and optionally CCs the sender.
    """
    code = 1
    message = 'OK'
    story_id = request.POST['story_id']
    feed_id = request.POST['feed_id']
    # Normalize the recipient field: commas and runs of spaces become single
    # spaces, then split into individual addresses.
    to_addresses = request.POST.get('to', '').replace(',', ' ').replace('  ', ' ').strip().split(' ')
    from_name = request.POST['from_name']
    from_email = request.POST['from_email']
    email_cc = is_true(request.POST.get('email_cc', 'true'))
    comments = request.POST['comments']
    comments = comments[:2048] # Separated due to PyLint
    from_address = 'share@newsblur.com'
    share_user_profile = MSocialProfile.get_user(request.user.pk)
    if not to_addresses:
        code = -1
        message = 'Please provide at least one email address.'
    elif not all(email_re.match(to_address) for to_address in to_addresses if to_addresses):
        code = -1
        message = 'You need to send the email to a valid email address.'
    elif not email_re.match(from_email):
        code = -1
        message = 'You need to provide your email address.'
    elif not from_name:
        code = -1
        message = 'You need to provide your name.'
    else:
        story, _ = MStory.find_story(feed_id, story_id)
        # text=True renders the story with its full text for the email body.
        story = Feed.format_story(story, feed_id, text=True)
        feed = Feed.get_by_id(story['story_feed_id'])
        params = {
            "to_addresses": to_addresses,
            "from_name": from_name,
            "from_email": from_email,
            "email_cc": email_cc,
            "comments": comments,
            "from_address": from_address,
            "story": story,
            "feed": feed,
            "share_user_profile": share_user_profile,
        }
        text    = render_to_string('mail/email_story.txt', params)
        html    = render_to_string('mail/email_story.xhtml', params)
        subject = '%s' % (story['story_title'])
        cc      = None
        if email_cc:
            cc = ['%s <%s>' % (from_name, from_email)]
        # Newlines in a subject would break the email headers.
        subject = subject.replace('\n', ' ')
        msg     = EmailMultiAlternatives(subject, text,
                                         from_email='NewsBlur <%s>' % from_address,
                                         to=to_addresses,
                                         cc=cc,
                                         headers={'Reply-To': '%s <%s>' % (from_name, from_email)})
        msg.attach_alternative(html, "text/html")
        try:
            msg.send()
        except boto.ses.connection.ResponseError, e:
            code = -1
            message = "Email error: %s" % str(e)
        logging.user(request, '~BMSharing story by email to %s recipient%s: ~FY~SB%s~SN~BM~FY/~SB%s' %
                              (len(to_addresses), '' if len(to_addresses) == 1 else 's',
                               story['story_title'][:50], feed and feed.feed_title[:50]))
    return {'code': code, 'message': message}
@json.json_view
def load_tutorial(request):
    """Serve the tutorial payload, or just log completion when finished."""
    if request.REQUEST.get('finished'):
        logging.user(request, '~BY~FW~SBFinishing Tutorial')
        return {}
    # The tutorial walks through subscribing to the NewsBlur blog itself.
    newsblur_feed = Feed.objects.filter(feed_address__icontains='blog.newsblur.com').order_by('-pk')[0]
    logging.user(request, '~BY~FW~SBLoading Tutorial')
    return {
        'newsblur_feed': newsblur_feed.canonical()
    }
| slava-sh/NewsBlur | apps/reader/views.py | Python | mit | 95,205 |
const determineTestFilesToRun = ({ inputFile, inputArgs = [], config }) => {
const path = require("path");
const fs = require("fs");
const glob = require("glob");
let filesToRun = [];
if (inputFile) {
filesToRun.push(inputFile);
} else if (inputArgs.length > 0) {
inputArgs.forEach(inputArg => filesToRun.push(inputArg));
}
if (filesToRun.length === 0) {
const directoryContents = glob.sync(
`${config.test_directory}${path.sep}**${path.sep}*`
);
filesToRun =
directoryContents.filter(item => fs.statSync(item).isFile()) || [];
}
return filesToRun.filter(file => {
return file.match(config.test_file_extension_regexp) !== null;
});
};
module.exports = {
determineTestFilesToRun
};
| ConsenSys/truffle | packages/core/lib/commands/test/determineTestFilesToRun.js | JavaScript | mit | 744 |
import "reflect-metadata";
import {createTestingConnections, closeTestingConnections, reloadTestingDatabases} from "../../utils/test-utils";
import {Connection} from "../../../src/connection/Connection";
import {Post} from "./entity/Post";
import {expect} from "chai";
import {PostStatus} from "./model/PostStatus";
describe("github issues > #182 enums are not saved properly", () => {
    let connections: Connection[];
    before(async () => connections = await createTestingConnections({
        entities: [__dirname + "/entity/*{.js,.ts}"],
        schemaCreate: true,
        dropSchemaOnConnection: true,
        enabledDrivers: ["mysql"] // enum columns in this form are MySQL-specific
    }));
    beforeEach(() => reloadTestingDatabases(connections));
    after(() => closeTestingConnections(connections));
    it("should persist successfully with enum values", () => Promise.all(connections.map(async connection => {
        // Persist, reload, verify, and remove a post for each enum value in
        // turn, checking that the enum round-trips through the database.
        const post1 = new Post();
        post1.status = PostStatus.NEW;
        post1.title = "Hello Post #1";
        // persist
        await connection.entityManager.persist(post1);
        const loadedPosts1 = await connection.entityManager.findOne(Post, { title: "Hello Post #1" });
        expect(loadedPosts1!).not.to.be.empty;
        loadedPosts1!.should.be.eql({
            id: 1,
            title: "Hello Post #1",
            status: PostStatus.NEW
        });
        // remove persisted
        await connection.entityManager.remove(post1);
        const post2 = new Post();
        post2.status = PostStatus.ACTIVE;
        post2.title = "Hello Post #1";
        // persist
        await connection.entityManager.persist(post2);
        const loadedPosts2 = await connection.entityManager.findOne(Post, { title: "Hello Post #1" });
        expect(loadedPosts2!).not.to.be.empty;
        loadedPosts2!.should.be.eql({
            id: 2,
            title: "Hello Post #1",
            status: PostStatus.ACTIVE
        });
        // remove persisted
        await connection.entityManager.remove(post2);
        const post3 = new Post();
        post3.status = PostStatus.ACHIEVED;
        post3.title = "Hello Post #1";
        // persist
        await connection.entityManager.persist(post3);
        const loadedPosts3 = await connection.entityManager.findOne(Post, { title: "Hello Post #1" });
        expect(loadedPosts3!).not.to.be.empty;
        loadedPosts3!.should.be.eql({
            id: 3,
            title: "Hello Post #1",
            status: PostStatus.ACHIEVED
        });
        // remove persisted
        await connection.entityManager.remove(post3);
    })));
});
| ReaxDev/typeorm | test/github-issues/182/issue-182.ts | TypeScript | mit | 2,660 |
// nodejs按行读取文件流
var Stream = require('stream').Stream,
util = require('util');
var LineStream = function() {
this.writable = true;
this.readable = true;
this.buffer = '';
};
util.inherits(LineStream, Stream);
LineStream.prototype.write = function(data, encoding) {
if (Buffer.isBuffer(data)) {
data = data.toString(encoding || 'utf8');
}
var parts = data.split(/\n/g);
var len = parts.length;
for (var i = 0; i < len; i++) {
this.emit('data', parts[i]+'\n');
}
};
LineStream.prototype.end = function() {
if(this.buffer.length > 0){
this.emit('data',this.buffer);
this.buffer = '';
}
this.emit('end');
};
module.exports = LineStream;
| SBFE/js-combine-pack | lib/tool/lineStream.js | JavaScript | mit | 680 |
using LibrarySystem.Models;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.EntityFramework;
using System;
using System.Collections.Generic;
using System.Linq;
namespace LibrarySystem.Account
{
    /// <summary>
    /// Code-behind for the account management page: lets the user change or
    /// set a local password and manage their linked external logins.
    /// </summary>
    public partial class Manage : System.Web.UI.Page
    {
        // Message shown in the success banner after a redirect back to this page.
        protected string SuccessMessage
        {
            get;
            private set;
        }
        // True when the user has more than one login, so removing one still
        // leaves a way to sign in.
        protected bool CanRemoveExternalLogins
        {
            get;
            private set;
        }
        protected void Page_Load()
        {
            if (!IsPostBack)
            {
                // Determine the sections to render
                ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
                if (manager.HasLocalLogin(User.Identity.GetUserId()))
                {
                    changePasswordHolder.Visible = true;
                }
                else
                {
                    // External-only account: offer creating a local password instead.
                    setPassword.Visible = true;
                    changePasswordHolder.Visible = false;
                }
                CanRemoveExternalLogins = manager.GetLogins(User.Identity.GetUserId()).Count() > 1;
                // Render success message
                var message = Request.QueryString["m"];
                if (message != null)
                {
                    // Strip the query string from action
                    Form.Action = ResolveUrl("~/Account/Manage");
                    SuccessMessage =
                        message == "ChangePwdSuccess" ? "Your password has been changed."
                        : message == "SetPwdSuccess" ? "Your password has been set."
                        : message == "RemoveLoginSuccess" ? "The account was removed."
                        : String.Empty;
                    successMessage.Visible = !String.IsNullOrEmpty(SuccessMessage);
                }
            }
        }
        // Handles the "change password" form for accounts with a local login.
        protected void ChangePassword_Click(object sender, EventArgs e)
        {
            if (IsValid)
            {
                IPasswordManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Passwords;
                IdentityResult result = manager.ChangePassword(User.Identity.GetUserName(), CurrentPassword.Text, NewPassword.Text);
                if (result.Success)
                {
                    // Redirect so the banner message survives the post-back.
                    Response.Redirect("~/Account/Manage?m=ChangePwdSuccess");
                }
                else
                {
                    AddErrors(result);
                }
            }
        }
        // Handles the "set password" form for accounts that only have
        // external logins so far.
        protected void SetPassword_Click(object sender, EventArgs e)
        {
            if (IsValid)
            {
                // Create the local login info and link the local account to the user
                ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
                IdentityResult result = manager.AddLocalLogin(User.Identity.GetUserId(), User.Identity.GetUserName(), password.Text);
                if (result.Success)
                {
                    Response.Redirect("~/Account/Manage?m=SetPwdSuccess");
                }
                else
                {
                    AddErrors(result);
                }
            }
        }
        // Data source for the external-logins list; also refreshes whether
        // removal should be allowed.
        public IEnumerable<IUserLogin> GetLogins()
        {
            ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
            var accounts = manager.GetLogins(User.Identity.GetUserId());
            CanRemoveExternalLogins = accounts.Count() > 1;
            return accounts;
        }
        // Removes one linked login and redirects back with a status message.
        public void RemoveLogin(string loginProvider, string providerKey)
        {
            ILoginManager manager = new IdentityManager(new IdentityStore(new ApplicationDbContext())).Logins;
            var result = manager.RemoveLogin(User.Identity.GetUserId(), loginProvider, providerKey);
            var msg = result.Success
                ? "?m=RemoveLoginSuccess"
                : String.Empty;
            Response.Redirect("~/Account/Manage" + msg);
        }
        // Copies identity errors into the page's model state for display.
        private void AddErrors(IdentityResult result) {
            foreach (var error in result.Errors) {
                ModelState.AddModelError("", error);
            }
        }
    }
} | krasimirkrustev/ta-library-system | LibrarySystem/LibrarySystem/Account/Manage.aspx.cs | C# | mit | 4,298 |
__author__ = "Guillaume"
__license__ = "MIT"
__copyright__ = "2015, ESRF"
import numpy
from freesas.model import SASModel
class Grid:
    """
    This class is used to create a grid which include all the input models
    """
    def __init__(self, inputfiles):
        """
        :param inputfiles: list of pdb files needed for averaging
        """
        self.inputs = inputfiles
        # Bounding box as [xmax, ymax, zmax, xmin, ymin, zmin]; filled by spatial_extent()
        self.size = []
        # Number of knots actually placed; set by make_grid()
        self.nbknots = None
        # Radius assigned to each knot; set by calc_radius()
        self.radius = None
        # (nbknots, 4) array of knot coordinates (x, y, z, 0); set by make_grid()
        self.coordknots = []

    def __repr__(self):
        return "Grid with %i knots"%self.nbknots

    def spatial_extent(self):
        """
        Calculate the maximal extent of input models

        Concatenates the atoms of every input model and pads the bounding box
        by the mean model fineness on each side.

        :return self.size: 6-list with x,y,z max and then x,y,z min
        """
        atoms = []
        models_fineness = []
        for files in self.inputs:
            m = SASModel(files)
            if len(atoms)==0:
                atoms = m.atoms
            else:
                atoms = numpy.append(atoms, m.atoms, axis=0)
            models_fineness.append(m.fineness)
        mean_fineness = sum(models_fineness) / len(models_fineness)
        coordmin = atoms.min(axis=0) - mean_fineness
        coordmax = atoms.max(axis=0) + mean_fineness
        self.size = [coordmax[0],coordmax[1],coordmax[2],coordmin[0],coordmin[1],coordmin[2]]
        return self.size

    def calc_radius(self, nbknots=None):
        """
        Calculate the radius of each point of a hexagonal close-packed grid,
        knowing the total volume and the number of knots in this grid.

        :param nbknots: number of knots wanted for the grid (default 5000)
        :return radius: the radius of each knot of the grid
        """
        if len(self.size)==0:
            self.spatial_extent()
        nbknots = nbknots if nbknots is not None else 5000
        size = self.size
        dx = size[0] - size[3]
        dy = size[1] - size[4]
        dz = size[2] - size[5]
        volume = dx * dy * dz
        # pi/(3*sqrt(2)) is the packing density of close-packed spheres.
        density = numpy.pi / (3*2**0.5)
        radius = ((3 /( 4 * numpy.pi)) * density * volume / nbknots)**(1.0/3)
        self.radius = radius
        return radius

    def make_grid(self):
        """
        Create a grid using the maximal size and the radius previously computed.
        The geometry used is a face-centered cubic lattice (fcc).

        :return knots: 2d-array, coordinates of each dot of the grid. Saved as self.coordknots.
        """
        if len(self.size)==0:
            self.spatial_extent()
        if self.radius is None:
            self.calc_radius()
        radius = self.radius
        # Lattice step between neighbouring planes/rows.
        a = numpy.sqrt(2.0)*radius
        xmax = self.size[0]
        xmin = self.size[3]
        ymax = self.size[1]
        ymin = self.size[4]
        zmax = self.size[2]
        zmin = self.size[5]
        x = 0.0
        y = 0.0
        z = 0.0
        xlist = []
        ylist = []
        zlist = []
        # Placeholder first row; removed after the loop below.
        knots = numpy.empty((1,4), dtype="float")
        while (zmin + z) <= zmax:
            zlist.append(z)
            z += a
        while (ymin + y) <= ymax:
            ylist.append(y)
            y += a
        while (xmin + x) <= xmax:
            xlist.append(x)
            x += a
        # Alternate the y-offset pattern with the parity of the z-plane and
        # x-row so successive layers are staggered (fcc stacking).
        for i in range(len(zlist)):
            z = zlist[i]
            if i % 2 ==0:
                for j in range(len(xlist)):
                    x = xlist[j]
                    if j % 2 == 0:
                        for y in ylist[0:-1:2]:
                            knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
                    else:
                        for y in ylist[1:-1:2]:
                            knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
            else:
                for j in range(len(xlist)):
                    x = xlist[j]
                    if j % 2 == 0:
                        for y in ylist[1:-1:2]:
                            knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
                    else:
                        for y in ylist[0:-1:2]:
                            knots = numpy.append(knots, [[xmin+x, ymin+y, zmin+z, 0.0]], axis=0)
        knots = numpy.delete(knots, 0, axis=0)
        self.nbknots = knots.shape[0]
        self.coordknots = knots
        return knots
class AverModels():
"""
Provides tools to create an averaged models using several aligned dummy atom models
"""
def __init__(self, inputfiles, grid):
"""
:param inputfiles: list of pdb files of aligned models
:param grid: 2d-array coordinates of each point of a grid, fourth column full of zeros
"""
self.inputfiles = inputfiles
self.models = []
self.header = []
self.radius = None
self.atoms = []
self.grid = grid
def __repr__(self):
return "Average SAS model with %i atoms"%len(self.atoms)
def read_files(self, reference=None):
"""
Read all the pdb file in the inputfiles list, creating SASModels.
The SASModels created are save in a list, the reference model is the first model in the list.
:param reference: position of the reference model file in the inputfiles list
"""
ref = reference if reference is not None else 0
inputfiles = self.inputfiles
models = []
models.append(SASModel(inputfiles[ref]))
for i in range(len(inputfiles)):
if i==ref:
continue
else:
models.append(SASModel(inputfiles[i]))
self.models = models
return models
def calc_occupancy(self, griddot):
"""
Assign an occupancy and a contribution factor to the point of the grid.
:param griddot: 1d-array, coordinates of a point of the grid
:return tuple: 2-tuple containing (occupancy, contribution)
"""
occ = 0.0
contrib = 0
for model in self.models:
f = model.fineness
for i in range(model.atoms.shape[0]):
dx = model.atoms[i, 0] - griddot[0]
dy = model.atoms[i, 1] - griddot[1]
dz = model.atoms[i, 2] - griddot[2]
dist = dx * dx + dy * dy + dz * dz
add = max(1 - (dist / f), 0)
if add != 0:
contrib += 1
occ += add
return occ, contrib
def assign_occupancy(self):
"""
For each point of the grid, total occupancy and contribution factor are computed and saved.
The grid is then ordered with decreasing value of occupancy.
The fourth column of the array correspond to the occupancy of the point and the fifth to
the contribution for this point.
:return sortedgrid: 2d-array, coordinates of each point of the grid
"""
grid = self.grid
nbknots = grid.shape[0]
grid = numpy.append(grid, numpy.zeros((nbknots, 1), dtype="float"), axis=1)
for i in range(nbknots):
occ, contrib = self.calc_occupancy(grid[i, 0:3])
grid[i, 3] = occ
grid[i, 4] = contrib
order = numpy.argsort(grid, axis=0)[:, -2]
sortedgrid = numpy.empty_like(grid)
for i in range(nbknots):
sortedgrid[nbknots - i - 1, :] = grid[order[i], :]
return sortedgrid
def make_header(self):
"""
Create the layout of the pdb file for the averaged model.
"""
header = []
header.append("Number of files averaged : %s\n"%len(self.inputfiles))
for i in self.inputfiles:
header.append(i + "\n")
header.append("Total number of dots in the grid : %s\n"%self.grid.shape[0])
decade = 1
for i in range(self.grid.shape[0]):
line = "ATOM CA ASP 1 20.00 2 201\n"
line = line[:7] + "%4.i"%(i + 1) + line[11:]
if not (i + 1) % 10:
decade += 1
line = line[:21] + "%4.i"%decade + line[25:]
header.append(line)
self.header = header
return header
    def save_aver(self, filename):
        """
        Save the position of each occupied dot of the grid, its occupancy and its contribution
        in a pdb file.

        Walks the prebuilt header lines; each ATOM template line is filled
        with the coordinates, occupancy and contribution of the matching
        grid row.  Rows with zero contribution are dropped from the output.

        :param filename: name of the pdb file to write
        """
        if len(self.header) == 0:
            # Header lines are built lazily on first save.
            self.make_header()
        # Grid must already carry occupancy (col 3) and contribution (col 4),
        # i.e. assign_occupancy() has been run.
        assert self.grid.shape[-1] == 5
        nr = 0
        with open(filename, "w") as pdbout:
            for line in self.header:
                if line.startswith("ATOM"):
                    if nr < self.grid.shape[0] and self.grid[nr, 4] != 0:
                        coord = "%8.3f%8.3f%8.3f" % tuple(self.grid[nr, 0:3])
                        occ = "%6.2f" % self.grid[nr, 3]
                        contrib = "%2.f" % self.grid[nr, 4]
                        # Splice the values into the fixed PDB columns of the template.
                        line = line[:30] + coord + occ + line[60:66] + contrib + line[68:]
                    else:
                        # Unoccupied dot: omit the line entirely.
                        line = ""
                    nr += 1
                pdbout.write(line)
| kif/freesas | freesas/average.py | Python | mit | 9,116 |
// Based on "Design Patterns: Elements of Reusable Object-Oriented Software"
// book by Erich Gamma, John Vlissides, Ralph Johnson, and Richard Helm
//
// Created by Bartosz Rachwal. The National Institute of Advanced Industrial Science and Technology, Japan.
#include "btree.h"
namespace structural
{
	namespace flyweight
	{
		// Flyweight index: nodes_ maps each position to an index into the
		// shared leafs_ list of Font pointers; -1 marks "unassigned".
		// NOTE(review): leafs_ and nodes_ are never freed — a destructor
		// should be declared in btree.h and defined here.
		BTree::BTree(long size) : size_(size)
		{
			leafs_ = new operational::iterator::List<Font*>();
			nodes_ = new int[size_];
			// Use a long counter: size_ is long, so an int counter would
			// mix signedness/width and could never terminate for huge sizes.
			for (long i = 0; i < size_; i++)
			{
				nodes_[i] = -1;
			}
		}

		BTree::BTree() : BTree(1000) { }

		// Associates the span [index, index + span) with the given font,
		// registering the font in leafs_ on first use so it is shared.
		void BTree::Set(Font* font, const int& index, const int& span) const
		{
			auto font_index = -1;
			for (auto i = 0; i < leafs_->Count(); i++)
			{
				// BUGFIX: compare the stored pointer with the argument.
				// The previous `&leafs_->Get(i) == &font` compared the
				// address of the stored slot with the address of the
				// parameter — never equal — so every call appended a
				// duplicate leaf, defeating the flyweight sharing.
				if (leafs_->Get(i) == font)
				{
					font_index = i;
				}
			}

			if (font_index == -1)
			{
				leafs_->Append(font);
				font_index = leafs_->Count() - 1;
			}

			// NOTE(review): no bounds check — index + span beyond size_
			// writes past nodes_; callers must keep the span in range.
			for (auto j = index; j < index + span; j++)
			{
				nodes_[j] = font_index;
			}
		}

		// Returns the font assigned to the given position, or nullptr when
		// the position is out of range or unassigned.
		Font *BTree::Get(int index) const
		{
			// BUGFIX: nodes_ holds size_ elements, so index == size_ is
			// already out of bounds (was `index > size_`); also reject
			// negative indices instead of reading out of bounds.
			if (index < 0 || index >= size_)
			{
				return nullptr;
			}

			auto font_index = nodes_[index];
			if (font_index == -1)
			{
				return nullptr;
			}

			return leafs_->Get(font_index);
		}
	}
}
| rachwal/DesignPatterns | structural/src/flyweight/btree.cc | C++ | mit | 1,119 |
Enefele::Application.configure do
  # Test-environment overrides; settings here win over config/application.rb.

  # The suite rebuilds its database every run, so cached classes are safe.
  config.cache_classes = true

  # A single test should not pay for loading the whole app; preloading tools
  # (spring, zeus, ...) may require flipping this to true.
  config.eager_load = false

  # Serve static assets with an hour of caching for request/feature specs.
  config.serve_static_assets = true
  config.static_cache_control = "public, max-age=3600"

  # Full error reports, no controller caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Let exceptions bubble up instead of rendering error pages.
  config.action_dispatch.show_exceptions = false

  # CSRF protection only gets in the way of tests.
  config.action_controller.allow_forgery_protection = false

  # Accumulate outgoing mail in ActionMailer::Base.deliveries
  # rather than delivering it.
  config.action_mailer.delivery_method = :test

  # Route deprecation notices to stderr.
  config.active_support.deprecation = :stderr
end
| anamartinez/enefele | config/environments/test.rb | Ruby | mit | 1,561 |
<!doctype html>
<!-- Essay page shell; shared chrome comes from the /layout PHP partials. -->
<!-- FIX: declare the document language (content is English) instead of the
     empty lang="" attribute, which left screen readers without a language. -->
<html class="no-js" lang="en">
    <head>
        <title>Zabuun - Learn Egyptian Arabic for English speakers</title>
        <meta name="description" content="">
        <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/head.php';?>
    </head>
    <body>
        <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/ie8.php';?>
        <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/header.php';?>
        <div class="content">
            <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/side.php';?>
            <div class="main">
                <div class="location">
                    <p class="breadcrumbs">Essays > The First Song</p>
                    <p class="expandcollapse">
                        <a href="">Expand All</a> | <a href="">Collapse All</a>
                    </p>
                </div>
                <!-- begin essay -->
                <h1>The First Song</h1>
                <p> She sits in the car. Her dad turns on the radio. A song plays. She taps her feet. She sways her head. Her dad laughs at her. He likes the song too. The song is over. The radio plays a different song. She does not like the new song. She sits quietly. </p>
                <!-- end essay -->
            </div>
        </div>
        <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/footer.php';?>
        <?php include $_SERVER['DOCUMENT_ROOT'].'/layout/scripts.php';?>
    </body>
</html> | javanigus/zabuun | essay/0006-the-first-song.php | PHP | mit | 1,193 |
package main

import (
	"os"

	"github.com/codegangsta/cli"
)

// newApp assembles the nano-client command-line application: its metadata,
// the three request flags, and (once enabled) the action handler.
func newApp() *cli.App {
	app := cli.NewApp()
	app.Name = "nano-client"
	app.Usage = "Send a request to service"
	app.Version = Version
	app.Flags = []cli.Flag{
		cli.StringFlag{
			Name:  "service, s",
			Usage: "Service endpoint to send request to (Required)",
		},
		cli.StringFlag{
			Name:  "method, m",
			Usage: "RPC method to call (Required)",
		},
		cli.StringFlag{
			Name:  "params, p",
			Usage: "Parameters as JSON (Required)",
		},
	}
	// NOTE(review): the action is still disabled, so the client currently
	// parses flags but performs no request.
	//app.Action = SendRequest
	return app
}

func main() {
	newApp().Run(os.Args)
}
| mouadino/go-nano | cli/nano-client/main.go | GO | mit | 551 |
import logger from './logger';
import app from './app';

// Boot the Feathers app on its configured port and wire process-level
// logging for promise rejections nobody handled.
const port = app.get('port');
const server = app.listen(port);

process.on('unhandledRejection', (reason, promise) => {
  logger.error('Unhandled Rejection at: Promise ', promise, reason);
});

server.on('listening', () => {
  logger.info('Feathers application started on http://%s:%d', app.get('host'), port);
});
| feathersjs/generator-feathers | generators/app/templates/ts/src/index.ts | TypeScript | mit | 352 |
// Template Source: BaseEntityCollectionResponse.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.requests;
import com.microsoft.graph.models.ContactFolder;
import com.microsoft.graph.http.BaseCollectionResponse;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
 * The class for the Contact Folder Collection Response.
 * <p>
 * Marker type only: paging and deserialization behaviour is inherited from
 * {@link BaseCollectionResponse} specialised to {@link ContactFolder}.
 * Generated file — manual edits will be overwritten by the code generator.
 */
public class ContactFolderCollectionResponse extends BaseCollectionResponse<ContactFolder> {
}
| microsoftgraph/msgraph-sdk-java | src/main/java/com/microsoft/graph/requests/ContactFolderCollectionResponse.java | Java | mit | 765 |
/*
* Copyright 2013 ZXing authors
*
 * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*package com.google.zxing.common.reedsolomon;*/
import * as assert from 'assert';
import { ZXingStringBuilder } from '@zxing/library';
import Random from '../../../core/util/Random';
import { ZXingSystem } from '@zxing/library';
import { GenericGF } from '@zxing/library';
import { ReedSolomonEncoder } from '@zxing/library';
import { ReedSolomonDecoder } from '@zxing/library';
/*import java.util.Random;*/
import { corrupt } from './ReedSolomonCorrupt';
/**
* @author Rustam Abdullaev
*/
describe('ReedSolomonSpec', () => {
it('testDataMatrix 1 - real life test case', () => {
testEncodeDecode(
GenericGF.DATA_MATRIX_FIELD_256,
Int32Array.from([142, 164, 186]),
Int32Array.from([114, 25, 5, 88, 102])
);
});
it('testDataMatrix 2 - real life test case', () => {
testEncodeDecode(
GenericGF.DATA_MATRIX_FIELD_256,
Int32Array.from([
0x69, 0x75, 0x75, 0x71, 0x3B, 0x30, 0x30, 0x64,
0x70, 0x65, 0x66, 0x2F, 0x68, 0x70, 0x70, 0x68,
0x6D, 0x66, 0x2F, 0x64, 0x70, 0x6E, 0x30, 0x71,
0x30, 0x7B, 0x79, 0x6A, 0x6F, 0x68, 0x30, 0x81,
0xF0, 0x88, 0x1F, 0xB5
]),
Int32Array.from([
0x1C, 0x64, 0xEE, 0xEB, 0xD0, 0x1D, 0x00, 0x03,
0xF0, 0x1C, 0xF1, 0xD0, 0x6D, 0x00, 0x98, 0xDA,
0x80, 0x88, 0xBE, 0xFF, 0xB7, 0xFA, 0xA9, 0x95
])
);
});
it('testDataMatrix 3.1 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.DATA_MATRIX_FIELD_256, 10, 240);
});
it('testDataMatrix 3.2 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.DATA_MATRIX_FIELD_256, 128, 127);
});
it('testDataMatrix 3.3 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.DATA_MATRIX_FIELD_256, 220, 35);
});
it('testQRCode 1 - from example given in ISO 18004, Annex I', () => {
// Test case from example given in ISO 18004, Annex I
testEncodeDecode(
GenericGF.QR_CODE_FIELD_256,
Int32Array.from([
0x10, 0x20, 0x0C, 0x56, 0x61, 0x80, 0xEC, 0x11,
0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11
]),
Int32Array.from([
0xA5, 0x24, 0xD4, 0xC1, 0xED, 0x36, 0xC7, 0x87,
0x2C, 0x55
])
);
});
it('testQRCode 2 - real life test case', () => {
testEncodeDecode(
GenericGF.QR_CODE_FIELD_256,
Int32Array.from([
0x72, 0x67, 0x2F, 0x77, 0x69, 0x6B, 0x69, 0x2F,
0x4D, 0x61, 0x69, 0x6E, 0x5F, 0x50, 0x61, 0x67,
0x65, 0x3B, 0x3B, 0x00, 0xEC, 0x11, 0xEC, 0x11,
0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11, 0xEC, 0x11
]),
Int32Array.from([
0xD8, 0xB8, 0xEF, 0x14, 0xEC, 0xD0, 0xCC, 0x85,
0x73, 0x40, 0x0B, 0xB5, 0x5A, 0xB8, 0x8B, 0x2E,
0x08, 0x62
])
);
});
it('testQRCode 3.1 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.QR_CODE_FIELD_256, 10, 240);
});
it('testQRCode 3.2 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.QR_CODE_FIELD_256, 128, 127);
});
it('testQRCode 3.3 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.QR_CODE_FIELD_256, 220, 35);
});
it('testAztec 1 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_PARAM,
Int32Array.from([0x5, 0x6]),
Int32Array.from([0x3, 0x2, 0xB, 0xB, 0x7])
);
});
it('testAztec 2 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_PARAM,
Int32Array.from([0x0, 0x0, 0x0, 0x9]),
Int32Array.from([0xA, 0xD, 0x8, 0x6, 0x5, 0x6])
);
});
it('testAztec 3 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_PARAM,
Int32Array.from([0x2, 0x8, 0x8, 0x7]),
Int32Array.from([0xE, 0xC, 0xA, 0x9, 0x6, 0x8])
);
});
it('testAztec 4 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_6,
Int32Array.from([0x9, 0x32, 0x1, 0x29, 0x2F, 0x2, 0x27, 0x25, 0x1, 0x1B]),
Int32Array.from([0x2C, 0x2, 0xD, 0xD, 0xA, 0x16, 0x28, 0x9, 0x22, 0xA, 0x14])
);
});
it('testAztec 5 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_8,
Int32Array.from([
0xE0, 0x86, 0x42, 0x98, 0xE8, 0x4A, 0x96, 0xC6,
0xB9, 0xF0, 0x8C, 0xA7, 0x4A, 0xDA, 0xF8, 0xCE,
0xB7, 0xDE, 0x88, 0x64, 0x29, 0x8E, 0x84, 0xA9,
0x6C, 0x6B, 0x9F, 0x08, 0xCA, 0x74, 0xAD, 0xAF,
0x8C, 0xEB, 0x7C, 0x10, 0xC8, 0x53, 0x1D, 0x09,
0x52, 0xD8, 0xD7, 0x3E, 0x11, 0x94, 0xE9, 0x5B,
0x5F, 0x19, 0xD6, 0xFB, 0xD1, 0x0C, 0x85, 0x31,
0xD0, 0x95, 0x2D, 0x8D, 0x73, 0xE1, 0x19, 0x4E,
0x95, 0xB5, 0xF1, 0x9D, 0x6F]),
Int32Array.from([
0x31, 0xD7, 0x04, 0x46, 0xB2, 0xC1, 0x06, 0x94,
0x17, 0xE5, 0x0C, 0x2B, 0xA3, 0x99, 0x15, 0x7F,
0x16, 0x3C, 0x66, 0xBA, 0x33, 0xD9, 0xE8, 0x87,
0x86, 0xBB, 0x4B, 0x15, 0x4E, 0x4A, 0xDE, 0xD4,
0xED, 0xA1, 0xF8, 0x47, 0x2A, 0x50, 0xA6, 0xBC,
0x53, 0x7D, 0x29, 0xFE, 0x06, 0x49, 0xF3, 0x73,
0x9F, 0xC1, 0x75])
);
});
it('testAztec 6 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_10,
Int32Array.from([
0x15C, 0x1E1, 0x2D5, 0x02E, 0x048, 0x1E2, 0x037, 0x0CD,
0x02E, 0x056, 0x26A, 0x281, 0x1C2, 0x1A6, 0x296, 0x045,
0x041, 0x0AA, 0x095, 0x2CE, 0x003, 0x38F, 0x2CD, 0x1A2,
0x036, 0x1AD, 0x04E, 0x090, 0x271, 0x0D3, 0x02E, 0x0D5,
0x2D4, 0x032, 0x2CA, 0x281, 0x0AA, 0x04E, 0x024, 0x2D3,
0x296, 0x281, 0x0E2, 0x08A, 0x1AA, 0x28A, 0x280, 0x07C,
0x286, 0x0A1, 0x1D0, 0x1AD, 0x154, 0x032, 0x2C2, 0x1C1,
0x145, 0x02B, 0x2D4, 0x2B0, 0x033, 0x2D5, 0x276, 0x1C1,
0x282, 0x10A, 0x2B5, 0x154, 0x003, 0x385, 0x20F, 0x0C4,
0x02D, 0x050, 0x266, 0x0D5, 0x033, 0x2D5, 0x276, 0x1C1,
0x0D4, 0x2A0, 0x08F, 0x0C4, 0x024, 0x20F, 0x2E2, 0x1AD,
0x154, 0x02E, 0x056, 0x26A, 0x281, 0x090, 0x1E5, 0x14E,
0x0CF, 0x2B6, 0x1C1, 0x28A, 0x2A1, 0x04E, 0x0D5, 0x003,
0x391, 0x122, 0x286, 0x1AD, 0x2D4, 0x028, 0x262, 0x2EA,
0x0A2, 0x004, 0x176, 0x295, 0x201, 0x0D5, 0x024, 0x20F,
0x116, 0x0C1, 0x056, 0x095, 0x213, 0x004, 0x1EA, 0x28A,
0x02A, 0x234, 0x2CE, 0x037, 0x157, 0x0D3, 0x262, 0x026,
0x262, 0x2A0, 0x086, 0x106, 0x2A1, 0x126, 0x1E5, 0x266,
0x26A, 0x2A1, 0x0E6, 0x1AA, 0x281, 0x2B6, 0x271, 0x154,
0x02F, 0x0C4, 0x02D, 0x213, 0x0CE, 0x003, 0x38F, 0x2CD,
0x1A2, 0x036, 0x1B5, 0x26A, 0x086, 0x280, 0x086, 0x1AA,
0x2A1, 0x226, 0x1AD, 0x0CF, 0x2A6, 0x292, 0x2C6, 0x022,
0x1AA, 0x256, 0x0D5, 0x02D, 0x050, 0x266, 0x0D5, 0x004,
0x176, 0x295, 0x201, 0x0D3, 0x055, 0x031, 0x2CD, 0x2EA,
0x1E2, 0x261, 0x1EA, 0x28A, 0x004, 0x145, 0x026, 0x1A6,
0x1C6, 0x1F5, 0x2CE, 0x034, 0x051, 0x146, 0x1E1, 0x0B0,
0x1B0, 0x261, 0x0D5, 0x025, 0x142, 0x1C0, 0x07C, 0x0B0,
0x1E6, 0x081, 0x044, 0x02F, 0x2CF, 0x081, 0x290, 0x0A2,
0x1A6, 0x281, 0x0CD, 0x155, 0x031, 0x1A2, 0x086, 0x262,
0x2A1, 0x0CD, 0x0CA, 0x0E6, 0x1E5, 0x003, 0x394, 0x0C5,
0x030, 0x26F, 0x053, 0x0C1, 0x1B6, 0x095, 0x2D4, 0x030,
0x26F, 0x053, 0x0C0, 0x07C, 0x2E6, 0x295, 0x143, 0x2CD,
0x2CE, 0x037, 0x0C9, 0x144, 0x2CD, 0x040, 0x08E, 0x054,
0x282, 0x022, 0x2A1, 0x229, 0x053, 0x0D5, 0x262, 0x027,
0x26A, 0x1E8, 0x14D, 0x1A2, 0x004, 0x26A, 0x296, 0x281,
0x176, 0x295, 0x201, 0x0E2, 0x2C4, 0x143, 0x2D4, 0x026,
0x262, 0x2A0, 0x08F, 0x0C4, 0x031, 0x213, 0x2B5, 0x155,
0x213, 0x02F, 0x143, 0x121, 0x2A6, 0x1AD, 0x2D4, 0x034,
0x0C5, 0x026, 0x295, 0x003, 0x396, 0x2A1, 0x176, 0x295,
0x201, 0x0AA, 0x04E, 0x004, 0x1B0, 0x070, 0x275, 0x154,
0x026, 0x2C1, 0x2B3, 0x154, 0x2AA, 0x256, 0x0C1, 0x044,
0x004, 0x23F
]),
Int32Array.from([
0x379, 0x099, 0x348, 0x010, 0x090, 0x196, 0x09C, 0x1FF,
0x1B0, 0x32D, 0x244, 0x0DE, 0x201, 0x386, 0x163, 0x11F,
0x39B, 0x344, 0x3FE, 0x02F, 0x188, 0x113, 0x3D9, 0x102,
0x04A, 0x2E1, 0x1D1, 0x18E, 0x077, 0x262, 0x241, 0x20D,
0x1B8, 0x11D, 0x0D0, 0x0A5, 0x29C, 0x24D, 0x3E7, 0x006,
0x2D0, 0x1B7, 0x337, 0x178, 0x0F1, 0x1E0, 0x00B, 0x01E,
0x0DA, 0x1C6, 0x2D9, 0x00D, 0x28B, 0x34A, 0x252, 0x27A,
0x057, 0x0CA, 0x2C2, 0x2E4, 0x3A6, 0x0E3, 0x22B, 0x307,
0x174, 0x292, 0x10C, 0x1ED, 0x2FD, 0x2D4, 0x0A7, 0x051,
0x34F, 0x07A, 0x1D5, 0x01D, 0x22E, 0x2C2, 0x1DF, 0x08F,
0x105, 0x3FE, 0x286, 0x2A2, 0x3B1, 0x131, 0x285, 0x362,
0x315, 0x13C, 0x0F9, 0x1A2, 0x28D, 0x246, 0x1B3, 0x12C,
0x2AD, 0x0F8, 0x222, 0x0EC, 0x39F, 0x358, 0x014, 0x229,
0x0C8, 0x360, 0x1C2, 0x031, 0x098, 0x041, 0x3E4, 0x046,
0x332, 0x318, 0x2E3, 0x24E, 0x3E2, 0x1E1, 0x0BE, 0x239,
0x306, 0x3A5, 0x352, 0x351, 0x275, 0x0ED, 0x045, 0x229,
0x0BF, 0x05D, 0x253, 0x1BE, 0x02E, 0x35A, 0x0E4, 0x2E9,
0x17A, 0x166, 0x03C, 0x007
])
);
});
it('testAztec 7 - real life test case', () => {
testEncodeDecode(
GenericGF.AZTEC_DATA_12,
Int32Array.from([
0x571, 0xE1B, 0x542, 0xE12, 0x1E2, 0x0DC, 0xCD0, 0xB85,
0x69A, 0xA81, 0x709, 0xA6A, 0x584, 0x510, 0x4AA, 0x256,
0xCE0, 0x0F8, 0xFB3, 0x5A2, 0x0D9, 0xAD1, 0x389, 0x09C,
0x4D3, 0x0B8, 0xD5B, 0x503, 0x2B2, 0xA81, 0x2A8, 0x4E0,
0x92D, 0x3A5, 0xA81, 0x388, 0x8A6, 0xAA8, 0xAA0, 0x07C,
0xA18, 0xA17, 0x41A, 0xD55, 0x032, 0xB09, 0xC15, 0x142,
0xBB5, 0x2B0, 0x0CE, 0xD59, 0xD9C, 0x1A0, 0x90A, 0xAD5,
0x540, 0x0F8, 0x583, 0xCC4, 0x0B4, 0x509, 0x98D, 0x50C,
0xED5, 0x9D9, 0xC13, 0x52A, 0x023, 0xCC4, 0x092, 0x0FB,
0x89A, 0xD55, 0x02E, 0x15A, 0x6AA, 0x049, 0x079, 0x54E,
0x33E, 0xB67, 0x068, 0xAA8, 0x44E, 0x354, 0x03E, 0x452,
0x2A1, 0x9AD, 0xB50, 0x289, 0x8AE, 0xA28, 0x804, 0x5DA,
0x958, 0x04D, 0x509, 0x20F, 0x458, 0xC11, 0x589, 0x584,
0xC04, 0x7AA, 0x8A0, 0xAA3, 0x4B3, 0x837, 0x55C, 0xD39,
0x882, 0x698, 0xAA0, 0x219, 0x06A, 0x852, 0x679, 0x666,
0x9AA, 0xA13, 0x99A, 0xAA0, 0x6B6, 0x9C5, 0x540, 0xBCC,
0x40B, 0x613, 0x338, 0x03E, 0x3EC, 0xD68, 0x836, 0x6D6,
0x6A2, 0x1A8, 0x021, 0x9AA, 0xA86, 0x266, 0xB4C, 0xFA9,
0xA92, 0xB18, 0x226, 0xAA5, 0x635, 0x42D, 0x142, 0x663,
0x540, 0x45D, 0xA95, 0x804, 0xD31, 0x543, 0x1B3, 0x6EA,
0x78A, 0x617, 0xAA8, 0xA01, 0x145, 0x099, 0xA67, 0x19F,
0x5B3, 0x834, 0x145, 0x467, 0x84B, 0x06C, 0x261, 0x354,
0x255, 0x09C, 0x01F, 0x0B0, 0x798, 0x811, 0x102, 0xFB3,
0xC81, 0xA40, 0xA26, 0x9A8, 0x133, 0x555, 0x0C5, 0xA22,
0x1A6, 0x2A8, 0x4CD, 0x328, 0xE67, 0x940, 0x3E5, 0x0C5,
0x0C2, 0x6F1, 0x4CC, 0x16D, 0x895, 0xB50, 0x309, 0xBC5,
0x330, 0x07C, 0xB9A, 0x955, 0x0EC, 0xDB3, 0x837, 0x325,
0x44B, 0x344, 0x023, 0x854, 0xA08, 0x22A, 0x862, 0x914,
0xCD5, 0x988, 0x279, 0xA9E, 0x853, 0x5A2, 0x012, 0x6AA,
0x5A8, 0x15D, 0xA95, 0x804, 0xE2B, 0x114, 0x3B5, 0x026,
0x98A, 0xA02, 0x3CC, 0x40C, 0x613, 0xAD5, 0x558, 0x4C2,
0xF50, 0xD21, 0xA99, 0xADB, 0x503, 0x431, 0x426, 0xA54,
0x03E, 0x5AA, 0x15D, 0xA95, 0x804, 0xAA1, 0x380, 0x46C,
0x070, 0x9D5, 0x540, 0x9AC, 0x1AC, 0xD54, 0xAAA, 0x563,
0x044, 0x401, 0x220, 0x9F1, 0x4F0, 0xDAA, 0x170, 0x90F,
0x106, 0xE66, 0x85C, 0x2B4, 0xD54, 0x0B8, 0x4D3, 0x52C,
0x228, 0x825, 0x512, 0xB67, 0x007, 0xC7D, 0x9AD, 0x106,
0xCD6, 0x89C, 0x484, 0xE26, 0x985, 0xC6A, 0xDA8, 0x195,
0x954, 0x095, 0x427, 0x049, 0x69D, 0x2D4, 0x09C, 0x445,
0x355, 0x455, 0x003, 0xE50, 0xC50, 0xBA0, 0xD6A, 0xA81,
0x958, 0x4E0, 0xA8A, 0x15D, 0xA95, 0x806, 0x76A, 0xCEC,
0xE0D, 0x048, 0x556, 0xAAA, 0x007, 0xC2C, 0x1E6, 0x205,
0xA28, 0x4CC, 0x6A8, 0x676, 0xACE, 0xCE0, 0x9A9, 0x501,
0x1E6, 0x204, 0x907, 0xDC4, 0xD6A, 0xA81, 0x70A, 0xD35,
0x502, 0x483, 0xCAA, 0x719, 0xF5B, 0x383, 0x455, 0x422,
0x71A, 0xA01, 0xF22, 0x915, 0x0CD, 0x6DA, 0x814, 0x4C5,
0x751, 0x440, 0x22E, 0xD4A, 0xC02, 0x6A8, 0x490, 0x7A2,
0xC60, 0x8AC, 0x4AC, 0x260, 0x23D, 0x545, 0x055, 0x1A5,
0x9C1, 0xBAA, 0xE69, 0xCC4, 0x134, 0xC55, 0x010, 0xC83,
0x542, 0x933, 0xCB3, 0x34D, 0x550, 0x9CC, 0xD55, 0x035,
0xB4E, 0x2AA, 0x05E, 0x620, 0x5B0, 0x999, 0xC01, 0xF1F,
0x66B, 0x441, 0xB36, 0xB35, 0x10D, 0x401, 0x0CD, 0x554,
0x313, 0x35A, 0x67D, 0x4D4, 0x958, 0xC11, 0x355, 0x2B1,
0xAA1, 0x68A, 0x133, 0x1AA, 0x022, 0xED4, 0xAC0, 0x269,
0x8AA, 0x18D, 0x9B7, 0x53C, 0x530, 0xBD5, 0x450, 0x08A,
0x284, 0xCD3, 0x38C, 0xFAD, 0x9C1, 0xA0A, 0x2A3, 0x3C2,
0x583, 0x613, 0x09A, 0xA12, 0xA84, 0xE00, 0xF85, 0x83C,
0xC40, 0x888, 0x17D, 0x9E4, 0x0D2, 0x051, 0x34D, 0x409,
0x9AA, 0xA86, 0x2D1, 0x10D, 0x315, 0x426, 0x699, 0x473,
0x3CA, 0x01F, 0x286, 0x286, 0x137, 0x8A6, 0x60B, 0x6C4,
0xADA, 0x818, 0x4DE, 0x299, 0x803, 0xE5C, 0xD4A, 0xA87,
0x66D, 0x9C1, 0xB99, 0x2A2, 0x59A, 0x201, 0x1C2, 0xA50,
0x411, 0x543, 0x148, 0xA66, 0xACC, 0x413, 0xCD4, 0xF42,
0x9AD, 0x100, 0x935, 0x52D, 0x40A, 0xED4, 0xAC0, 0x271,
0x588, 0xA1D, 0xA81, 0x34C, 0x550, 0x11E, 0x620, 0x630,
0x9D6, 0xAAA, 0xC26, 0x17A, 0x869, 0x0D4, 0xCD6, 0xDA8,
0x1A1, 0x8A1, 0x352, 0xA01, 0xF2D, 0x50A, 0xED4, 0xAC0,
0x255, 0x09C, 0x023, 0x603, 0x84E, 0xAAA, 0x04D, 0x60D,
0x66A, 0xA55, 0x52B, 0x182, 0x220, 0x091, 0x00F, 0x8A7,
0x86D, 0x50B, 0x848, 0x788, 0x373, 0x342, 0xE15, 0xA6A,
0xA05, 0xC26, 0x9A9, 0x611, 0x441, 0x2A8, 0x95B, 0x380,
0x3E3, 0xECD, 0x688, 0x366, 0xB44, 0xE24, 0x271, 0x34C,
0x2E3, 0x56D, 0x40C, 0xACA, 0xA04, 0xAA1, 0x382, 0x4B4,
0xE96, 0xA04, 0xE22, 0x29A, 0xAA2, 0xA80, 0x1F2, 0x862,
0x85D, 0x06B, 0x554, 0x0CA, 0xC27, 0x054, 0x50A, 0xED4,
0xAC0, 0x33B, 0x567, 0x670, 0x682, 0x42A, 0xB55, 0x500,
0x3E1, 0x60F, 0x310, 0x2D1, 0x426, 0x635, 0x433, 0xB56,
0x767, 0x04D, 0x4A8, 0x08F, 0x310, 0x248, 0x3EE, 0x26B,
0x554, 0x0B8, 0x569, 0xAA8, 0x124, 0x1E5, 0x538, 0xCFA,
0xD9C, 0x1A2, 0xAA1, 0x138, 0xD50, 0x0F9, 0x148, 0xA86,
0x6B6, 0xD40, 0xA26, 0x2BA, 0x8A2, 0x011, 0x76A, 0x560,
0x135, 0x424, 0x83D, 0x163, 0x045, 0x625, 0x613, 0x011,
0xEAA, 0x282, 0xA8D, 0x2CE, 0x0DD, 0x573, 0x4E6, 0x209,
0xA62, 0xA80, 0x864, 0x1AA, 0x149, 0x9E5, 0x99A, 0x6AA,
0x84E, 0x66A, 0xA81, 0xADA, 0x715, 0x502, 0xF31, 0x02D,
0x84C, 0xCE0, 0x0F8, 0xFB3, 0x5A2, 0x0D9, 0xB59, 0xA88,
0x6A0, 0x086, 0x6AA, 0xA18, 0x99A, 0xD33, 0xEA6, 0xA4A,
0xC60, 0x89A, 0xA95, 0x8D5, 0x0B4, 0x509, 0x98D, 0x501,
0x176, 0xA56, 0x013, 0x4C5, 0x50C, 0x6CD, 0xBA9, 0xE29,
0x85E, 0xAA2, 0x804, 0x514, 0x266, 0x99C, 0x67D, 0x6CE,
0x0D0, 0x515, 0x19E, 0x12C, 0x1B0, 0x984, 0xD50, 0x954,
0x270, 0x07C, 0x2C1, 0xE62, 0x044, 0x40B, 0xECF, 0x206,
0x902, 0x89A, 0x6A0, 0x4CD, 0x554, 0x316, 0x888, 0x698,
0xAA1, 0x334, 0xCA3, 0x99E, 0x500, 0xF94, 0x314, 0x309,
0xBC5, 0x330, 0x5B6, 0x256, 0xD40, 0xC26, 0xF14, 0xCC0,
0x1F2, 0xE6A, 0x554, 0x3B3, 0x6CE, 0x0DC, 0xC95, 0x12C,
0xD10, 0x08E, 0x152, 0x820, 0x8AA, 0x18A, 0x453, 0x356,
0x620, 0x9E6, 0xA7A, 0x14D, 0x688, 0x049, 0xAA9, 0x6A0,
0x576, 0xA56, 0x013, 0x8AC, 0x450, 0xED4, 0x09A, 0x62A,
0x808, 0xF31, 0x031, 0x84E, 0xB55, 0x561, 0x30B, 0xD43,
0x486, 0xA66, 0xB6D, 0x40D, 0x0C5, 0x09A, 0x950, 0x0F9,
0x6A8, 0x576, 0xA56, 0x012, 0xA84, 0xE01, 0x1B0, 0x1C2,
0x755, 0x502, 0x6B0, 0x6B3, 0x552, 0xAA9, 0x58C, 0x111,
0x004, 0x882, 0x7C5, 0x3C3, 0x6A8, 0x5C2, 0x43C, 0x41B,
0x99A, 0x170, 0xAD3, 0x550, 0x2E1, 0x34D, 0x4B0, 0x8A2,
0x095, 0x44A, 0xD9C, 0x01F, 0x1F6, 0x6B4, 0x41B, 0x35A,
0x271, 0x213, 0x89A, 0x617, 0x1AB, 0x6A0, 0x656, 0x550,
0x255, 0x09C, 0x125, 0xA74, 0xB50, 0x271, 0x114, 0xD55,
0x154, 0x00F, 0x943, 0x142, 0xE83, 0x5AA, 0xA06, 0x561,
0x382, 0xA28, 0x576, 0xA56, 0x019, 0xDAB, 0x3B3, 0x834,
0x121, 0x55A, 0xAA8, 0x01F, 0x0B0, 0x798, 0x816, 0x8A1,
0x331, 0xAA1, 0x9DA, 0xB3B, 0x382, 0x6A5, 0x404, 0x798,
0x812, 0x41F, 0x713, 0x5AA, 0xA05, 0xC2B, 0x4D5, 0x409,
0x20F, 0x2A9, 0xC67, 0xD6C, 0xE0D, 0x155, 0x089, 0xC6A,
0x807, 0xC8A, 0x454, 0x335, 0xB6A, 0x051, 0x315, 0xD45,
0x100, 0x8BB, 0x52B, 0x009, 0xAA1, 0x241, 0xE8B, 0x182,
0x2B1, 0x2B0, 0x980, 0x8F5, 0x514, 0x154, 0x696, 0x706,
0xEAB, 0x9A7, 0x310, 0x4D3, 0x154, 0x043, 0x20D, 0x50A,
0x4CF, 0x2CC, 0xD35, 0x542, 0x733, 0x554, 0x0D6, 0xD38,
0xAA8, 0x179, 0x881, 0x6C2, 0x667, 0x007, 0xC7D, 0x9AD,
0x106, 0xCDA, 0xCD4, 0x435, 0x004, 0x335, 0x550, 0xC4C,
0xD69, 0x9F5, 0x352, 0x563, 0x044, 0xD54, 0xAC6, 0xA85,
0xA28, 0x4CC, 0x6A8, 0x08B, 0xB52, 0xB00, 0x9A6, 0x2A8,
0x636, 0x6DD, 0x4F1, 0x4C2, 0xF55, 0x140, 0x228, 0xA13,
0x34C, 0xE33, 0xEB6, 0x706, 0x828, 0xA8C, 0xF09, 0x60D,
0x84C, 0x26A, 0x84A, 0xA13, 0x803, 0xE16, 0x0F3, 0x102,
0x220, 0x5F6, 0x790, 0x348, 0x144, 0xD35, 0x026, 0x6AA,
0xA18, 0xB44, 0x434, 0xC55, 0x099, 0xA65, 0x1CC, 0xF28,
0x07C, 0xA18, 0xA18, 0x4DE, 0x299, 0x82D, 0xB12, 0xB6A,
0x061, 0x378, 0xA66, 0x00F, 0x973, 0x52A, 0xA1D, 0x9B6,
0x706, 0xE64, 0xA89, 0x668, 0x804, 0x70A, 0x941, 0x045,
0x50C, 0x522, 0x99A, 0xB31, 0x04F, 0x353, 0xD0A, 0x6B4,
0x402, 0x4D5, 0x4B5, 0x02B, 0xB52, 0xB00, 0x9C5, 0x622,
0x876, 0xA04, 0xD31, 0x540, 0x479, 0x881, 0x8C2, 0x75A,
0xAAB, 0x098, 0x5EA, 0x1A4, 0x353, 0x35B, 0x6A0, 0x686,
0x284, 0xD4A, 0x807, 0xCB5, 0x42B, 0xB52, 0xB00, 0x954,
0x270, 0x08D, 0x80E, 0x13A, 0xAA8, 0x135, 0x835, 0x9AA,
0x801, 0xF14, 0xF0D, 0xAA1, 0x709, 0x0F1, 0x06E, 0x668,
0x5C2, 0xB4D, 0x540, 0xB84, 0xD35, 0x2C2, 0x288, 0x255,
0x12B, 0x670, 0x07C, 0x7D9, 0xAD1, 0x06C, 0xD68, 0x9C4,
0x84E, 0x269, 0x85C, 0x6AD, 0xA81, 0x959, 0x540, 0x954,
0x270, 0x496, 0x9D2, 0xD40, 0x9C4, 0x453, 0x554, 0x550,
0x03E, 0x50C, 0x50B, 0xA0D, 0x6AA, 0x819, 0x584, 0xE0A,
0x8A1, 0x5DA, 0x958, 0x067, 0x6AC, 0xECE, 0x0D0, 0x485,
0x56A, 0xAA0, 0x07C, 0x2C1, 0xE62, 0x05A, 0x284, 0xCC6,
0xA86, 0x76A, 0xCEC, 0xE09, 0xA95, 0x011, 0xE62, 0x049,
0x07D, 0xC4D, 0x6AA, 0x817, 0x0AD, 0x355, 0x024, 0x83C,
0xAA7, 0x19F, 0x5B3, 0x834, 0x554, 0x227, 0x1AA, 0x01F,
0x229, 0x150, 0xCD6, 0xDA8, 0x144, 0xC57, 0x514, 0x402,
0x2ED, 0x4AC, 0x026, 0xA84, 0x907, 0xA2C, 0x608, 0xAC4,
0xAC2, 0x602, 0x3D5, 0x450, 0x551, 0xA59, 0xC1B, 0xAAE,
0x69C, 0xC41, 0x34C, 0x550, 0x10C, 0x835, 0x429, 0x33C,
0xB33, 0x4D5, 0x509, 0xCCD, 0x550, 0x35B, 0x4E2, 0xAA0,
0x5E6, 0x205, 0xB09, 0x99C, 0x09F
]),
Int32Array.from([
0xD54, 0x221, 0x154, 0x7CD, 0xBF3, 0x112, 0x89B, 0xC5E,
0x9CD, 0x07E, 0xFB6, 0x78F, 0x7FA, 0x16F, 0x377, 0x4B4,
0x62D, 0x475, 0xBC2, 0x861, 0xB72, 0x9D0, 0x76A, 0x5A1,
0x22A, 0xF74, 0xDBA, 0x8B1, 0x139, 0xDCD, 0x012, 0x293,
0x705, 0xA34, 0xDD5, 0x3D2, 0x7F8, 0x0A6, 0x89A, 0x346,
0xCE0, 0x690, 0x40E, 0xFF3, 0xC4D, 0x97F, 0x9C9, 0x016,
0x73A, 0x923, 0xBCE, 0xFA9, 0xE6A, 0xB92, 0x02A, 0x07C,
0x04B, 0x8D5, 0x753, 0x42E, 0x67E, 0x87C, 0xEE6, 0xD7D,
0x2BF, 0xFB2, 0xFF8, 0x42F, 0x4CB, 0x214, 0x779, 0x02D,
0x606, 0xA02, 0x08A, 0xD4F, 0xB87, 0xDDF, 0xC49, 0xB51,
0x0E9, 0xF89, 0xAEF, 0xC92, 0x383, 0x98D, 0x367, 0xBD3,
0xA55, 0x148, 0x9DB, 0x913, 0xC79, 0x6FF, 0x387, 0x6EA,
0x7FA, 0xC1B, 0x12D, 0x303, 0xBCA, 0x503, 0x0FB, 0xB14,
0x0D4, 0xAD1, 0xAFC, 0x9DD, 0x404, 0x145, 0x6E5, 0x8ED,
0xF94, 0xD72, 0x645, 0xA21, 0x1A8, 0xABF, 0xC03, 0x91E,
0xD53, 0x48C, 0x471, 0x4E4, 0x408, 0x33C, 0x5DF, 0x73D,
0xA2A, 0x454, 0xD77, 0xC48, 0x2F5, 0x96A, 0x9CF, 0x047,
0x611, 0xE92, 0xC2F, 0xA98, 0x56D, 0x919, 0x615, 0x535,
0x67A, 0x8C1, 0x2E2, 0xBC4, 0xBE8, 0x328, 0x04F, 0x257,
0x3F9, 0xFA5, 0x477, 0x12E, 0x94B, 0x116, 0xEF7, 0x65F,
0x6B3, 0x915, 0xC64, 0x9AF, 0xB6C, 0x6A2, 0x50D, 0xEA3,
0x26E, 0xC23, 0x817, 0xA42, 0x71A, 0x9DD, 0xDA8, 0x84D,
0x3F3, 0x85B, 0xB00, 0x1FC, 0xB0A, 0xC2F, 0x00C, 0x095,
0xC58, 0x0E3, 0x807, 0x962, 0xC4B, 0x29A, 0x6FC, 0x958,
0xD29, 0x59E, 0xB14, 0x95A, 0xEDE, 0xF3D, 0xFB8, 0x0E5,
0x348, 0x2E7, 0x38E, 0x56A, 0x410, 0x3B1, 0x4B0, 0x793,
0xAB7, 0x0BC, 0x648, 0x719, 0xE3E, 0xFB4, 0x3B4, 0xE5C,
0x950, 0xD2A, 0x50B, 0x76F, 0x8D2, 0x3C7, 0xECC, 0x87C,
0x53A, 0xBA7, 0x4C3, 0x148, 0x437, 0x820, 0xECD, 0x660,
0x095, 0x2F4, 0x661, 0x6A4, 0xB74, 0x5F3, 0x1D2, 0x7EC,
0x8E2, 0xA40, 0xA6F, 0xFC3, 0x3BE, 0x1E9, 0x52C, 0x233,
0x173, 0x4EF, 0xA7C, 0x40B, 0x14C, 0x88D, 0xF30, 0x8D9,
0xBDB, 0x0A6, 0x940, 0xD46, 0xB2B, 0x03E, 0x46A, 0x641,
0xF08, 0xAFF, 0x496, 0x68A, 0x7A4, 0x0BA, 0xD43, 0x515,
0xB26, 0xD8F, 0x05C, 0xD6E, 0xA2C, 0xF25, 0x628, 0x4E5,
0x81D, 0xA2A, 0x1FF, 0x302, 0xFBD, 0x6D9, 0x711, 0xD8B,
0xE5C, 0x5CF, 0x42E, 0x008, 0x863, 0xB6F, 0x1E1, 0x3DA,
0xACE, 0x82B, 0x2DB, 0x7EB, 0xC15, 0x79F, 0xA79, 0xDAF,
0x00D, 0x2F6, 0x0CE, 0x370, 0x7E8, 0x9E6, 0x89F, 0xAE9,
0x175, 0xA95, 0x06B, 0x9DF, 0xAFF, 0x45B, 0x823, 0xAA4,
0xC79, 0x773, 0x886, 0x854, 0x0A5, 0x6D1, 0xE55, 0xEBB,
0x518, 0xE50, 0xF8F, 0x8CC, 0x834, 0x388, 0xCD2, 0xFC1,
0xA55, 0x1F8, 0xD1F, 0xE08, 0xF93, 0x362, 0xA22, 0x9FA,
0xCE5, 0x3C3, 0xDD4, 0xC53, 0xB94, 0xAD0, 0x6EB, 0x68D,
0x660, 0x8FC, 0xBCD, 0x914, 0x16F, 0x4C0, 0x134, 0xE1A,
0x76F, 0x9CB, 0x660, 0xEA0, 0x320, 0x15A, 0xCE3, 0x7E8,
0x03E, 0xB9A, 0xC90, 0xA14, 0x256, 0x1A8, 0x639, 0x7C6,
0xA59, 0xA65, 0x956, 0x9E4, 0x592, 0x6A9, 0xCFF, 0x4DC,
0xAA3, 0xD2A, 0xFDE, 0xA87, 0xBF5, 0x9F0, 0xC32, 0x94F,
0x675, 0x9A6, 0x369, 0x648, 0x289, 0x823, 0x498, 0x574,
0x8D1, 0xA13, 0xD1A, 0xBB5, 0xA19, 0x7F7, 0x775, 0x138,
0x949, 0xA4C, 0xE36, 0x126, 0xC85, 0xE05, 0xFEE, 0x962,
0x36D, 0x08D, 0xC76, 0x1E1, 0x1EC, 0x8D7, 0x231, 0xB68,
0x03C, 0x1DE, 0x7DF, 0x2B1, 0x09D, 0xC81, 0xDA4, 0x8F7,
0x6B9, 0x947, 0x9B0
])
);
});
it('testAztec 8.1 - synthetic test cases (compact mode message)', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_PARAM, 2, 5);
});
it('testAztec 8.2 - synthetic test cases (full mode message)', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_PARAM, 4, 6);
});
it('testAztec 8.3 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_6, 10, 7);
});
it('testAztec 8.4 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_6, 20, 12);
});
it('testAztec 8.5 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_8, 20, 11);
});
it('testAztec 8.6 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_8, 128, 127);
});
it('testAztec 8.7 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_10, 128, 128);
});
it('testAztec 8.8 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_10, 768, 255);
});
it('testAztec 8.9 - synthetic test cases', () => {
testEncodeDecodeRandom(GenericGF.AZTEC_DATA_12, 3072, 1023);
});
});
// Iteration counts for the randomized sweeps below; kept small so the
// suite stays fast (large fields are further capped to 1 iteration).
const DECODER_RANDOM_TEST_ITERATIONS: number /*int*/ = 3;
const DECODER_TEST_ITERATIONS: number /*int*/ = 10;
/**
 * Generates `dataSize` random data words over `field`, encodes them with
 * `ecSize` ECC words, and verifies the decoder round-trip via testDecoder().
 * Fields larger than 256 symbols run a single iteration to keep the suite fast.
 */
function testEncodeDecodeRandom(field: GenericGF, dataSize: number /*int*/, ecSize: number /*int*/): void {
    // Sanity-check the requested sizes against the field capacity.
    assert.strictEqual(dataSize > 0 && dataSize <= field.getSize() - 3, true, 'Invalid data size for ' + field);
    assert.strictEqual(ecSize > 0 && ecSize + dataSize <= field.getSize(), true, 'Invalid ECC size for ' + field);
    const encoder = new ReedSolomonEncoder(field);
    const message = new Int32Array(dataSize + ecSize);
    const dataWords = new Int32Array(dataSize); /*Int32Array(dataSize)*/
    const ecWords = new Int32Array(ecSize); /*Int32Array(ecSize)*/
    const random: Random = getPseudoRandom();
    const iterations: number /*int*/ = field.getSize() > 256 ? 1 : DECODER_RANDOM_TEST_ITERATIONS;
    for (let i: number /*int*/ = 0; i < iterations; i++) {
        // generate random data
        for (let k: number /*int*/ = 0; k < dataSize; k++) {
            dataWords[k] = random.next(field.getSize());
        }
        // generate ECC words: encode() works in place on data + zero padding,
        // then the ECC tail is copied out for the decoder check.
        ZXingSystem.arraycopy(dataWords, 0, message, 0, dataWords.length);
        encoder.encode(message, ecWords.length);
        ZXingSystem.arraycopy(message, dataSize, ecWords, 0, ecSize);
        // check to see if Decoder can fix up to ecWords/2 random errors
        testDecoder(field, dataWords, ecWords);
    }
}
/**
 * Verifies a known vector in both directions: the encoder must produce
 * exactly `ecWords` for `dataWords`, and the decoder must recover
 * `dataWords` from the (corrupted) concatenation.
 */
function testEncodeDecode(field: GenericGF, dataWords: Int32Array, ecWords: Int32Array): void {
    testEncoder(field, dataWords, ecWords);
    testDecoder(field, dataWords, ecWords);
}
/**
 * Encodes `dataWords` over `field` and checks that the generated ECC words
 * match `ecWords` exactly.
 */
function testEncoder(field: GenericGF, dataWords: Int32Array, ecWords: Int32Array): void {
    const total = dataWords.length + ecWords.length;

    // Expected message is simply data followed by the known ECC words.
    const expected = new Int32Array(total);
    expected.set(dataWords, 0);
    expected.set(ecWords, dataWords.length);

    // Actual message starts as data + zero padding; encode() fills the ECC tail.
    const actual = new Int32Array(total);
    actual.set(dataWords, 0);
    new ReedSolomonEncoder(field).encode(actual, ecWords.length);

    assertDataEquals(actual, expected, 'Encode in ' + field + ' (' + dataWords.length + ',' + ecWords.length + ') failed');
}
/**
 * Sweeps the number of corrupted symbols i from 0 to ecWords.length and
 * checks that the decoder recovers `dataWords` from `dataWords + ecWords`
 * whenever i is below ecWords/2; beyond that a decode failure is allowed
 * and ends the sweep.
 */
function testDecoder(field: GenericGF, dataWords: Int32Array, ecWords: Int32Array): void {
    const decoder = new ReedSolomonDecoder(field);
    const message = new Int32Array(dataWords.length + ecWords.length);
    const maxErrors: number /*int*/ = Math.floor(ecWords.length / 2);
    const random: Random = getPseudoRandom();
    const iterations: number /*int*/ = field.getSize() > 256 ? 1 : DECODER_TEST_ITERATIONS;
    for (let j: number /*int*/ = 0; j < iterations; j++) {
        for (let i: number /*int*/ = 0; i < ecWords.length; i++) {
            if (i > 10 && i < Math.floor(ecWords.length / 2) - 10) {
                // performance improvement - skip intermediate cases in long-running tests
                i += Math.floor(ecWords.length / 10);
            }
            // Rebuild the pristine message, then corrupt i symbols in place.
            ZXingSystem.arraycopy(dataWords, 0, message, 0, dataWords.length);
            ZXingSystem.arraycopy(ecWords, 0, message, dataWords.length, ecWords.length);
            corrupt(message, i, random, field.getSize());
            try {
                decoder.decode(message, ecWords.length);
            } catch (e/*ReedSolomonException e*/) {
                // fail only if maxErrors exceeded
                assert.strictEqual(i > maxErrors, true,
                    'Decode in ' + field + ' (' + dataWords.length + ',' + ecWords.length + ') failed at ' + i + ' errors: ' + e);
                // else stop
                break;
            }
            // NOTE(review): strict `<` means the decoded data is not
            // re-checked at exactly maxErrors — confirm that is intended.
            if (i < maxErrors) {
                assertDataEquals(message,
                    dataWords,
                    'Decode in ' + field + ' (' + dataWords.length + ',' + ecWords.length + ') failed at ' + i + ' errors');
            }
        }
    }
}
/**
 * Asserts that `received` starts with exactly the values of `expected`,
 * failing with a hex dump of both arrays at the first mismatch.
 */
function assertDataEquals(received: Int32Array, expected: Int32Array, message: string): void {
    const length = expected.length;
    for (let index = 0; index < length; index++) {
        if (received[index] === expected[index]) {
            continue;
        }
        const receivedPrefix = arrayToString(Int32Array.from(received.subarray(0, length)));
        assert.ok(false, `${message}. Mismatch at ${index}. Expected ${arrayToString(expected)}, got ${receivedPrefix}`);
    }
}
/**
 * Formats an Int32Array as `{a,b,c}` with each value in lowercase hex
 * (e.g. `{1,ff,2a}`) for use in assertion messages.
 *
 * @param data values to format
 * @return the formatted string
 */
function arrayToString(data: Int32Array): string {
    // Array.from + join replaces the manual StringBuilder loop, and the
    // `string` primitive replaces the boxed `String` wrapper return type.
    return '{' + Array.from(data, (value) => value.toString(16)).join(',') + '}';
}
/**
 * Returns a deterministically seeded PRNG so corruption patterns are
 * reproducible across test runs.
 */
function getPseudoRandom(): Random {
    const seed = '0xDEADBEEF';
    return new Random(seed);
}
| zxing-js/library | src/test/core/common/reedsolomon/ReedSolomon.spec.ts | TypeScript | mit | 32,086 |
require "capybara/rspec"
require "webmock/rspec"
require "plek"
require "gds_api/test_helpers/publishing_api"
# Block all outbound HTTP during specs, but allow localhost so Capybara's
# driver can still talk to the app under test.
WebMock.disable_net_connect!(allow_localhost: true)
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    # Include chained clauses (e.g. `.and`) in custom matcher descriptions.
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  config.mock_with :rspec do |mocks|
    # Raise when stubbing a method that does not exist on the real object.
    mocks.verify_partial_doubles = true
  end
  # Request-stubbing helpers for the GOV.UK Publishing API.
  config.include GdsApi::TestHelpers::PublishingApi
end
| alphagov/service-manual-publisher | spec/spec_helper.rb | Ruby | mit | 463 |
#!/usr/bin/env python
"""
Project-wide application configuration.
DO NOT STORE SECRETS, PASSWORDS, ETC. IN THIS FILE.
They will be exposed to users. Use environment variables instead.
See get_secrets() below for a fast way to access them.
"""
# NOTE(review): `os` is unused in this visible portion and get_secrets() is
# not defined here -- presumably both are used later in the file; confirm.
import os
"""
NAMES
"""
# Project name used for display
PROJECT_NAME = 'quotable'
# Project name in urls
# Use dashes, not underscores!
PROJECT_SLUG = 'quotable'
# The name of the repository containing the source
REPOSITORY_NAME = 'quotable'
REPOSITORY_URL = 'git@github.com:nprapps/%s.git' % REPOSITORY_NAME
REPOSITORY_ALT_URL = None # 'git@bitbucket.org:nprapps/%s.git' % REPOSITORY_NAME'
# The name to be used in paths on the server
PROJECT_FILENAME = 'quotable'
"""
DEPLOYMENT
"""
# Hosts/buckets the built app is deployed to.
FILE_SERVER = 'tools.apps.npr.org'
S3_BUCKET = 'tools.apps.npr.org'
ASSETS_S3_BUCKET = 'assets.apps.npr.org'
# These variables will be set at runtime. See configure_targets() below
DEBUG = True
"""
COPY EDITING
"""
# Key of the Google Doc/Sheet holding editable copy for the app.
COPY_GOOGLE_DOC_KEY = '0AlXMOHKxzQVRdHZuX1UycXplRlBfLVB0UVNldHJYZmc'
"""
SHARING
"""
PROJECT_DESCRIPTION = 'An opinionated project template for (mostly) server-less apps.'
# Canonical share URL, derived from the S3 bucket and project slug above.
SHARE_URL = 'http://%s/%s/' % (S3_BUCKET, PROJECT_SLUG)
TWITTER = {
    'TEXT': PROJECT_NAME,
    'URL': SHARE_URL,
    # Will be resized to 120x120, can't be larger than 1MB
    'IMAGE_URL': ''
}
FACEBOOK = {
    'TITLE': PROJECT_NAME,
    'URL': SHARE_URL,
    'DESCRIPTION': PROJECT_DESCRIPTION,
    # Should be square. No documented restrictions on size
    'IMAGE_URL': TWITTER['IMAGE_URL'],
    'APP_ID': '138837436154588'
}
GOOGLE = {
    # Thumbnail image for Google News / Search.
    # No documented restrictions on resolution or size
    'IMAGE_URL': TWITTER['IMAGE_URL']
}
# Ad-serving (DFP) parameters for the NPR environment.
NPR_DFP = {
    'STORY_ID': '203618536',
    'TARGET': 'News_NPR_News_Investigations',
    'ENVIRONMENT': 'NPRTEST',
    'TESTSERVER': 'true'
}
"""
SERVICES
"""
GOOGLE_ANALYTICS_ID = 'UA-5828686-4'
| 18F/quotable | app_config.py | Python | mit | 1,915 |
class AddIndexToSkillTotals < ActiveRecord::Migration
  # Speed up lookups that filter skill totals by name or by date.
  def change
    %i[name date].each do |column|
      add_index :skill_totals, column
    end
  end
end
| thatguyandy27/SkillsCompiler | skills_app/db/migrate/20140316173555_add_index_to_skill_totals.rb | Ruby | mit | 147 |
require_relative '../../../spec_helper'
require 'matrix'
# Specs for Vector#normalize: scaling a vector to unit length.
describe "Vector#normalize" do
  it "returns a normalized copy of the vector" do
    # 1/sqrt(1^2 + 2^2 + 3^2) -- the scale factor for [1, 2, 3].
    unit = 0.2672612419124244
    Vector[1, 2, 3].normalize.should == Vector[unit, unit * 2, unit * 3]
  end

  it "raises an error for zero vectors" do
    -> { Vector[].normalize }.should raise_error(Vector::ZeroVectorError)
    -> { Vector[0, 0, 0].normalize }.should raise_error(Vector::ZeroVectorError)
  end
end
| ruby/rubyspec | library/matrix/vector/normalize_spec.rb | Ruby | mit | 473 |
'use strict';

/**
 * Module dependencies.
 */
// NOTE(review): the `goaliedash` controller is required but never used below
// -- confirm whether a route handler was meant to be mounted.
var users = require('../../app/controllers/users'),
	goaliedash = require('../../app/controllers/goaliedash');

// Registers the goalie-dashboard route; access requires login and
// authorisation middleware from the users controller.
module.exports = function(app) {
	app.route('/goaliedash')
		.get(users.requiresLogin, users.hasAuthorization);
}; | thcmc/412hockey | app/routes/goaliedash.server.routes.js | JavaScript | mit | 275 |
// @flow
import React from 'react'
import withPropsStream from '@vega/utils/withPropsStream'
import {map} from 'rxjs/operators'
import styles from './styles/Communicator.css'
import ThreadList from './ThreadList'
import CreateComment from './CreateComment'
function getPropsStream(props$) {
  // todo: implement open/close behavior
  return props$.pipe(
    map(incoming => ({...incoming, isOpen: true}))
  )
}
type Props = {
  // Whether the communicator panel is visible (currently forced true upstream).
  isOpen: boolean,
  // Document ids whose comment threads are shown.
  subjectIds: string[],
  // Id of the comment to scroll to / highlight.
  focusedCommentId: string
}

// Comment side-panel: renders the thread list for the given subjects and,
// when exactly one subject is selected, a comment composer that can be
// "stuck" open while the user types.
export default withPropsStream(
  getPropsStream,
  class Communicator extends React.Component<Props> {
    state = {
      // True while the composer should stay expanded/pinned.
      createCommentIsSticky: false
    }

    handleCloseCreateComment = event => {
      this.setState({
        createCommentIsSticky: false
      })
      // Prevent the click from re-triggering handleStickCreateComment.
      event.stopPropagation()
    }

    handleStickCreateComment = () => {
      this.setState({
        createCommentIsSticky: true
      })
    }

    render() {
      const {isOpen, subjectIds, focusedCommentId} = this.props
      const {createCommentIsSticky} = this.state
      return isOpen ? (
        <div className={styles.root}>
          <div
            className={
              createCommentIsSticky
                ? styles.feedWithWithStickyCreateComment
                : styles.feed
            }
          >
            {/* NOTE(review): prop is singular `subjectId` but receives the
                whole `subjectIds` array -- confirm ThreadList expects that. */}
            <ThreadList
              subjectId={subjectIds}
              focusedCommentId={focusedCommentId}
            />
          </div>
          {subjectIds.length === 1 && (
            <CreateComment
              subjectId={subjectIds[0]}
              showCloseButton={createCommentIsSticky}
              className={
                createCommentIsSticky
                  ? styles.createCommentSticky
                  : styles.createComment
              }
              onClose={this.handleCloseCreateComment}
              onSubmit={this.handleCloseCreateComment}
              onClick={this.handleStickCreateComment}
            />
          )}
        </div>
      ) : null
    }
  }
)
| VegaPublish/vega-studio | packages/@vega/communicator-system/src/components/providers/Communicator.js | JavaScript | mit | 1,991 |
package pricing;
import org.configureme.ConfigurationManager;
import org.configureme.Environment;
import org.configureme.GlobalEnvironment;
import org.configureme.environments.DynamicEnvironment;
public class ShowPrice {

    /**
     * Demonstrates environment-dependent configuration with ConfigureMe:
     * prints the product price for the default environment and for several
     * concrete locale environments.
     */
    public static void main(String[] args) {
        showPrice();
        showPriceIn("USA", GlobalEnvironment.INSTANCE);
        showPriceIn("United Kingdom", new DynamicEnvironment("europe", "uk"));
        showPriceIn("Germany", new DynamicEnvironment("europe", "de"));
        showPriceIn("Austria", new DynamicEnvironment("europe", "at"));
    }

    /**
     * Configures a fresh {@link Pricing} instance in the given environment
     * and prints the resulting price.
     *
     * @param description human-readable environment name used in the output
     * @param environment the ConfigureMe environment to resolve values in
     */
    private static void showPriceIn(String description, Environment environment) {
        Pricing pricing = new Pricing();
        ConfigurationManager.INSTANCE.configure(pricing, environment);
        System.out.println("Price in " + description + " is " + pricing.getProductPrice());
    }

    /** Configures {@link Pricing} in the default environment and prints the price. */
    private static void showPrice() {
        Pricing pricing = new Pricing();
        ConfigurationManager.INSTANCE.configure(pricing);
        System.out.println("Please pay " + pricing.getProductPrice());
    }
}
| anotheria/configureme | src/examples/pricing/ShowPrice.java | Java | mit | 989 |
/*
* Copyright (c) 2014-2022 The Voxie Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include <VoxieBackend/VoxieBackend.hpp>
#include <VoxieBackend/Data/Data.hpp>
#include <VoxieClient/ObjectExport/Client.hpp>
#include <VoxieClient/ObjectExport/ExportedObject.hpp>
#include <QtCore/QPointer>
#include <QtDBus/QDBusAbstractAdaptor>
#include <QtDBus/QDBusObjectPath>
namespace vx {
class NodePrototype;
namespace io {
class Operation;
class RunFilterOperation;
} // namespace io
class Exception;
class ExternalOperationAdaptorImpl;
// Base class for operations carried out by an external process on behalf of
// voxie. Tracks the claiming DBus client and the wrapped Operation.
class VOXIEBACKEND_EXPORT ExternalOperation : public vx::RefCountedObject {
  Q_OBJECT
  REFCOUNTEDOBJ_DECL(ExternalOperation)
  friend class ExternalOperationAdaptorImpl;
  // DBus client that has claimed this operation; null until claimed.
  QPointer<vx::Client> client = nullptr;
  QSharedPointer<vx::io::Operation> operation_;
 protected:
  // TODO: Get rid of this / replace it by operation()->isFinished()?
  bool isFinished = false;
  // Implemented in the .cpp -- presumably validates the calling client;
  // NOTE(review): confirm exact semantics there.
  void checkClient();
  virtual void cleanup();
 public:
  explicit ExternalOperation(
      const QSharedPointer<vx::io::Operation>& operation);
  ~ExternalOperation() override;
  QWeakPointer<QSharedPointer<ExternalOperation>> initialReference;
  // Short description of what the operation does (e.g. "Import"/"Export").
  virtual QString action() = 0;
  virtual QString name() = 0;
  const QSharedPointer<vx::io::Operation>& operation() const {
    return operation_;
  }
  bool isClaimed();
 Q_SIGNALS:
  void error(const vx::Exception& error);
  // Emitted when the operation is claimed
  void claimed();
};
// TODO: Should probably be moved to ExtensionImporter / ExtensionExporter
class ExternalOperationImportAdaptorImpl;
// Import variant: an extension loads `filename` (with importer `properties`)
// and delivers the resulting data via finished().
class VOXIEBACKEND_EXPORT ExternalOperationImport : public ExternalOperation {
  Q_OBJECT
  REFCOUNTEDOBJ_DECL(ExternalOperationImport)
  friend class ExternalOperationImportAdaptorImpl;
  QString filename_;
  QMap<QString, QDBusVariant> properties_;
  QString name_;
 public:
  explicit ExternalOperationImport(
      const QSharedPointer<vx::io::Operation>& operation,
      const QString& filename, const QMap<QString, QDBusVariant>& properties,
      const QString& name);
  ~ExternalOperationImport() override;
  QString action() override;
  QString name() override;
  const QString& filename() { return filename_; }
  const QMap<QString, QDBusVariant>& properties() { return properties_; }
 Q_SIGNALS:
  // Emitted with the imported data once the external process succeeds.
  void finished(const QSharedPointer<vx::Data>& data);
};
class ExternalOperationExportAdapterImpl;
// Export variant: an extension writes `data` to `filename`.
// NOTE(review): the forward declaration above spells
// "ExternalOperationExportAdapterImpl" while the friend below spells
// "...AdaptorImpl" -- the forward declaration looks like a typo; confirm.
class VOXIEBACKEND_EXPORT ExternalOperationExport : public ExternalOperation {
  Q_OBJECT
  REFCOUNTEDOBJ_DECL(ExternalOperationExport)
  friend class ExternalOperationExportAdaptorImpl;
  QString filename_;
  QString name_;
  QSharedPointer<vx::Data> data_;
 public:
  explicit ExternalOperationExport(
      const QSharedPointer<vx::io::Operation>& operation,
      const QString& filename, const QString& name,
      const QSharedPointer<vx::Data>& data);
  ~ExternalOperationExport() override;
  QString action() override;
  QString name() override;
  // TODO: data
  const QString& filename() { return filename_; }
  const QSharedPointer<vx::Data>& data() { return data_; }
 Q_SIGNALS:
  void finished();
};
} // namespace vx
| voxie-viewer/voxie | src/VoxieBackend/Component/ExternalOperation.hpp | C++ | mit | 4,192 |
var models = require('../models');
var express = require('express');
var router = express.Router();
/* GET home page. */
router.get('/', function(req, res, next) {
  // Removed stray `console.log(req.session)` debug statement: it wrote the
  // full session contents to the server log on every request.
  res.render('layout');
});
module.exports = router; | NUSPartTime/NUSPartTime | routes/index.js | JavaScript | mit | 248 |
import os
import logging
from django.core.management.base import BaseCommand
from django.core.mail import send_mail
from django.template.loader import get_template
from workshops.models import Badge, Person, Role
logger = logging.getLogger()
class Command(BaseCommand):
    help = 'Report instructors activity.'
    def add_arguments(self, parser):
        """Register the command-line flags this command accepts."""
        parser.add_argument(
            '--send-out-for-real', action='store_true', default=False,
            help='Send information to the instructors.',
        )
        parser.add_argument(
            '--no-may-contact-only', action='store_true', default=False,
            help='Include instructors not willing to be contacted.',
        )
        parser.add_argument(
            '--django-mailing', action='store_true', default=False,
            help='Use Django mailing system. This requires some environmental '
                 'variables to be set, see `settings.py`.',
        )
        parser.add_argument(
            '-s', '--sender', action='store',
            default='workshops@carpentries.org',
            help='E-mail used in "from:" field.',
        )
    def foreign_tasks(self, tasks, person, roles):
        """List of other instructors' tasks, per event.

        For each of `person`'s tasks, returns a queryset of the tasks (with
        the given roles) on the same event held by *other* people.
        """
        return [
            task.event.task_set.filter(role__in=roles)
            .exclude(person=person)
            .select_related('person')
            for task in tasks
        ]
    def fetch_activity(self, may_contact_only=True):
        """Collect activity records for all badged instructors.

        Returns a list of dicts with keys: person, lessons,
        instructor_awards, and tasks (pairs of own task / others' tasks).
        Instructors without an e-mail address are always excluded.
        """
        roles = Role.objects.filter(name__in=['instructor', 'helper'])
        instructor_badges = Badge.objects.instructor_badges()
        instructors = Person.objects.filter(badges__in=instructor_badges)
        instructors = instructors.exclude(email__isnull=True)
        if may_contact_only:
            instructors = instructors.exclude(may_contact=False)
        # let's get some things faster
        instructors = instructors.select_related('airport') \
                                 .prefetch_related('task_set', 'lessons',
                                                   'award_set', 'badges')
        # don't repeat the records
        instructors = instructors.distinct()
        result = []
        for person in instructors:
            tasks = person.task_set.filter(role__in=roles) \
                                   .select_related('event', 'role')
            record = {
                'person': person,
                'lessons': person.lessons.all(),
                'instructor_awards': person.award_set.filter(
                    badge__in=person.badges.instructor_badges()
                ),
                'tasks': zip(tasks,
                             self.foreign_tasks(tasks, person, roles)),
            }
            result.append(record)
        return result
    def make_message(self, record):
        """Render the e-mail body for one instructor record."""
        tmplt = get_template('mailing/instructor_activity.txt')
        return tmplt.render(context=record)
    def subject(self, record):
        # in future we can vary the subject depending on the record details
        return 'Updating your Software Carpentry information'
    def recipient(self, record):
        """E-mail address the message should be sent to."""
        return record['person'].email
    def send_message(self, subject, message, sender, recipient, for_real=False,
                     django_mailing=False):
        """Send (or dry-run) one e-mail, via Django mail or the `mail` binary.

        NOTE(review): in the non-Django branch, subject/sender/recipient are
        interpolated into a shell command unescaped -- a crafted address or
        subject could inject shell syntax. Prefer subprocess.run() with an
        argument list.
        """
        if for_real:
            if django_mailing:
                send_mail(subject, message, sender, [recipient])
            else:
                command = 'mail -s "{subject}" -r {sender} {recipient}'.format(
                    subject=subject,
                    sender=sender,
                    recipient=recipient,
                )
                writer = os.popen(command, 'w')
                writer.write(message)
                writer.close()
        if self.verbosity >= 2:
            # write only a header
            self.stdout.write('-' * 40 + '\n')
            self.stdout.write('To: {}\n'.format(recipient))
            self.stdout.write('Subject: {}\n'.format(subject))
            self.stdout.write('From: {}\n'.format(sender))
        if self.verbosity >= 3:
            # write whole message out
            self.stdout.write(message + '\n')
    def handle(self, *args, **options):
        """Entry point: fetch activity records and mail every instructor."""
        # default is dummy run - only actually send mail if told to
        send_for_real = options['send_out_for_real']
        # by default include only instructors who have `may_contact==True`
        no_may_contact_only = options['no_may_contact_only']
        # use mailing options from settings.py or the `mail` system command?
        django_mailing = options['django_mailing']
        # verbosity option is added by Django
        self.verbosity = int(options['verbosity'])
        sender = options['sender']
        results = self.fetch_activity(not no_may_contact_only)
        for result in results:
            message = self.make_message(result)
            subject = self.subject(result)
            recipient = self.recipient(result)
            self.send_message(subject, message, sender, recipient,
                              for_real=send_for_real,
                              django_mailing=django_mailing)
        if self.verbosity >= 1:
            self.stdout.write('Sent {} emails.\n'.format(len(results)))
<?php
/*
* This file is part of the Phuri package.
*
* Copyright © 2014 Erin Millard
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Eloquent\Phuri\Generic;
use Eloquent\Pathogen\Factory\PathFactoryInterface;
use Eloquent\Phuri\Generic\Parser\GenericUriComponentsInterface;
use Eloquent\Phuri\Normalization\UriNormalizerInterface;
use Eloquent\Phuri\Parameters\Factory\UriParametersFactoryInterface;
use Eloquent\Phuri\Path\Factory\UriPathFactory;
use Eloquent\Phuri\UriInterface;
use Eloquent\Phuri\Validation\Exception\InvalidUriComponentExceptionInterface;
/**
* An abstract base class for implementing generic URIs.
*/
abstract class AbstractGenericUri implements GenericUriInterface
{
    /**
     * Construct a new generic URI.
     *
     * @param GenericUriComponentsInterface $components The URI components.
     *
     * @throws InvalidUriComponentExceptionInterface If any of the components are invalid.
     */
    public function __construct(GenericUriComponentsInterface $components)
    {
        $this->username = $components->username();
        $this->password = $components->password();
        $this->host = $components->host();
        $this->port = $components->port();
        $this->path = $components->path();
        $this->fragment = $components->fragment();
        // A null parameter list means the URI had no '?' delimiter at all;
        // this is distinct from a present-but-empty query string, which is
        // tracked via hasQueryDelimiter.
        if (null === $components->queryParameters()) {
            $this->hasQueryDelimiter = false;
            $this->queryParameters = static::queryParametersFactory()
                ->createEmpty();
        } else {
            $this->hasQueryDelimiter = true;
            $this->queryParameters = static::queryParametersFactory()
                ->createFromEncodedPairs($components->queryParameters());
        }
    }
    // Implementation of GenericUriInterface ===================================
    /**
     * Returns true if this URI has a username.
     *
     * This method will return false for URIs with empty string usernames.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.1
     *
     * @return boolean True if this URI has a username.
     */
    public function hasUsername()
    {
        return null !== $this->encodedUsername() &&
            '' !== $this->encodedUsername();
    }
    /**
     * Get the username.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.1
     *
     * @return string|null The username, or null if there is no username.
     */
    public function username()
    {
        if (null === $this->encodedUsername()) {
            return null;
        }
        return static::encoder()->decode($this->encodedUsername());
    }
    /**
     * Get the encoded username.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.1
     *
     * @return string|null The encoded username, or null if there is no username.
     */
    public function encodedUsername()
    {
        return $this->username;
    }
    /**
     * Returns true if this URI has a password.
     *
     * This method will return false for URIs with empty string passwords.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.1
     *
     * @return boolean True if this URI has a password.
     */
    public function hasPassword()
    {
        return null !== $this->encodedPassword() &&
            '' !== $this->encodedPassword();
    }
    /**
     * Get the password.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.1
     *
     * @return string|null The password, or null if there is no password.
     */
    public function password()
    {
        if (null === $this->encodedPassword()) {
            return null;
        }
        return static::encoder()->decode($this->encodedPassword());
    }
    /**
     * Get the encoded password.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.1
     *
     * @return string|null The encoded password, or null if there is no password.
     */
    public function encodedPassword()
    {
        return $this->password;
    }
    /**
     * Get the encoded host.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.2
     *
     * @return string|null The encoded host, or null if there is no host.
     */
    public function encodedHost()
    {
        return $this->host;
    }
    /**
     * Returns true if this URI has a port.
     *
     * This method will return false for URIs with empty string ports.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.3
     *
     * @return boolean True if this URI has a port.
     */
    public function hasPort()
    {
        return null !== $this->encodedPort() && '' !== $this->encodedPort();
    }
    /**
     * Get the port.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.3
     *
     * @return integer|null The port, or null if there is no port, or the port is an empty string.
     */
    public function port()
    {
        if ($this->hasPort()) {
            return intval($this->encodedPort());
        }
        return null;
    }
    /**
     * Get the encoded port.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.2.3
     *
     * @return string|null The encoded port, or null if there is no port.
     */
    public function encodedPort()
    {
        return $this->port;
    }
    /**
     * Returns true if this URI has a path.
     *
     * This method will return false for URIs with empty string paths.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.3
     *
     * @return boolean True if this URI has a path.
     */
    public function hasPath()
    {
        return '' !== $this->path();
    }
    /**
     * Get the path.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.3
     *
     * @return string The path.
     */
    public function path()
    {
        return static::encoder()->decode($this->encodedPath());
    }
    /**
     * Get the encoded path.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.3
     *
     * @return string The encoded path.
     */
    public function encodedPath()
    {
        return $this->path;
    }
    /**
     * Get the path as a path object.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.3
     *
     * @return UriPathInterface The path.
     */
    public function pathObject()
    {
        return static::pathFactory()->create($this->path());
    }
    /**
     * Returns true if this URI has a query.
     *
     * This method will return false for URIs with empty string queries.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.4
     *
     * @return boolean True if this URI has a query.
     */
    public function hasQuery()
    {
        return !$this->queryParameters()->isEmpty();
    }
    /**
     * Get the query.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.4
     *
     * @return string|null The query, or null if there is no query.
     */
    public function query()
    {
        // Only URIs that contained a '?' delimiter have a query at all.
        if ($this->hasQueryDelimiter()) {
            return static::encoder()->decode($this->encodedQuery());
        }
        return null;
    }
    /**
     * Get the encoded query.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.4
     *
     * @return string|null The encoded query, or null if there is no query.
     */
    public function encodedQuery()
    {
        if ($this->hasQueryDelimiter()) {
            return $this->queryParameters()->string();
        }
        return null;
    }
    /**
     * Get the query parameters.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.4
     *
     * @return UriParametersInterface The query parameters.
     */
    public function queryParameters()
    {
        return $this->queryParameters;
    }
    /**
     * Returns true if this URI has a fragment.
     *
     * This method will return false for URIs with empty string fragments.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.5
     *
     * @return boolean True if this URI has a fragment.
     */
    public function hasFragment()
    {
        return null !== $this->encodedFragment() &&
            '' !== $this->encodedFragment();
    }
    /**
     * Get the fragment.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.5
     *
     * @return string|null The fragment, or null if there is no fragment.
     */
    public function fragment()
    {
        if (null === $this->encodedFragment()) {
            return null;
        }
        return static::encoder()->decode($this->encodedFragment());
    }
    /**
     * Get the encoded fragment.
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.5
     *
     * @return string|null The encoded fragment, or null if there is no fragment.
     */
    public function encodedFragment()
    {
        return $this->fragment;
    }
    /**
     * Get the fragment parameters.
     *
     * The fragment is parsed with the same parameter syntax as the query
     * string (hence the query parameters factory is reused here).
     *
     * @link http://tools.ietf.org/html/rfc3986#section-3.5
     *
     * @return UriParametersInterface The fragment parameters.
     */
    public function fragmentParameters()
    {
        if (null === $this->encodedFragment()) {
            return static::queryParametersFactory()->createEmpty();
        }
        return static::queryParametersFactory()
            ->createFromString($this->encodedFragment());
    }
    // Implementation of UriInterface ==========================================
    /**
     * Return a normalized version of this URI.
     *
     * @return UriInterface A normalized version of this URI.
     */
    public function normalize()
    {
        return static::normalizer()->normalize($this);
    }
    /**
     * Get a string representation of this URI.
     *
     * @return string A string representation of this URI.
     */
    public function __toString()
    {
        return $this->string();
    }
    // Implementation details ==================================================
    /**
     * Returns true if this URI has a query delimiter.
     *
     * Distinguishes "?foo" and bare "?" (delimiter present) from a URI with
     * no query part at all.
     *
     * @return boolean True if this URI has a query delimiter.
     */
    public function hasQueryDelimiter()
    {
        return $this->hasQueryDelimiter;
    }
    /**
     * Get the most appropriate factory for this type of URI.
     *
     * @return Factory\GenericUriFactoryInterface The factory.
     */
    protected static function factory()
    {
        return Factory\GenericUriFactory::instance();
    }
    /**
     * Get the most appropriate path factory for this type of URI.
     *
     * @return PathFactoryInterface The factory.
     */
    protected static function pathFactory()
    {
        return UriPathFactory::instance();
    }
    /**
     * Get the most appropriate query parameters factory for this type of URI.
     *
     * @return UriParametersFactoryInterface The factory.
     */
    protected static function queryParametersFactory()
    {
        return Factory\GenericUriQueryParametersFactory::instance();
    }
    /**
     * Get the most appropriate validator for this type of URI.
     *
     * @return Validation\GenericUriValidatorInterface The validator.
     */
    protected static function validator()
    {
        return Validation\GenericUriValidator::instance();
    }
    /**
     * Get the most appropriate encoder for this type of URI.
     *
     * @return Encoding\GenericUriEncoderInterface The encoder.
     */
    protected static function encoder()
    {
        return Encoding\GenericUriEncoder::instance();
    }
    /**
     * Get the most appropriate normalizer for this type of URI.
     *
     * @return UriNormalizerInterface The normalizer.
     */
    protected static function normalizer()
    {
        return Normalization\GenericUriNormalizer::instance();
    }
    // URI components, all stored in percent-encoded form.
    private $username;
    private $password;
    private $host;
    private $port;
    private $path;
    // Whether the original URI contained a '?' (see hasQueryDelimiter()).
    private $hasQueryDelimiter;
    private $queryParameters;
    private $fragment;
}
| ezzatron/phuri | src/Generic/AbstractGenericUri.php | PHP | mit | 12,051 |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.data.manipulator.block;
import static org.spongepowered.api.data.DataQuery.of;
import org.spongepowered.api.data.DataContainer;
import org.spongepowered.api.data.DataQuery;
import org.spongepowered.api.data.MemoryDataContainer;
import org.spongepowered.api.data.manipulator.block.SignaledOutputData;
import org.spongepowered.common.data.manipulator.AbstractIntData;
public class SpongeSignaledOutputData extends AbstractIntData<SignaledOutputData> implements SignaledOutputData {

    /** Data query key under which the output signal strength is serialized. */
    public static final DataQuery OUTPUT_SIGNAL_STRENGTH = of("OutputSignalStrength");

    public SpongeSignaledOutputData() {
        // Signal strength is bounded to the range 0..15, defaulting to 0.
        super(SignaledOutputData.class, 0, 0, 15);
    }

    @Override
    public int getOutputSignal() {
        return getValue();
    }

    @Override
    public SignaledOutputData setOutputSignal(int signal) {
        return setValue(signal);
    }

    @Override
    public SignaledOutputData copy() {
        SpongeSignaledOutputData duplicate = new SpongeSignaledOutputData();
        return duplicate.setValue(getValue());
    }

    @Override
    public DataContainer toContainer() {
        return new MemoryDataContainer().set(OUTPUT_SIGNAL_STRENGTH, getValue());
    }
}
| gabizou/SpongeCommon | src/main/java/org/spongepowered/common/data/manipulator/block/SpongeSignaledOutputData.java | Java | mit | 2,441 |
// Copyright (c) 2013 Raphael Estrada
// License: The MIT License - see "LICENSE" file for details
// Author URL: http://www.galaktor.net
// Author E-Mail: galaktor@gmx.de
using System.Reflection;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("AutofacExtensions")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Raphael Estrada")]
[assembly: AssemblyProduct("AutofacExtensions")]
[assembly: AssemblyCopyright("Copyright (c) Raphael Estrada 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("31dee9f1-b44b-4a04-89cf-d17ea82953ef")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("0.0.0.0")]
// NOTE(review): version left at the 0.0.0.0 placeholder -- presumably
// stamped by the build pipeline; confirm before publishing.
[assembly: AssemblyVersion("0.0.0.0")]
[assembly: AssemblyFileVersion("0.0.0.0")]
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_delete_request(
    scope: str,
    policy_assignment_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP DELETE request that removes a policy assignment.

    :param scope: Scope of the policy assignment; inserted into the URL
        without quoting (it may itself contain path segments).
    :param policy_assignment_name: Name of the assignment to delete.
    :return: An ``HttpRequest`` ready to be sent by the service client.
    """
    api_version = "2016-12-01"
    accept = "application/json, text/json"
    # Construct URL
    url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
    path_format_arguments = {
        "scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
        "policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="DELETE",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_create_request(
    scope: str,
    policy_assignment_name: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request that creates a policy assignment.

    :param scope: Scope of the policy assignment; inserted into the URL
        without quoting.
    :param policy_assignment_name: Name of the assignment to create.
    :keyword json: JSON-serializable request body (mutually exclusive with
        ``content``).
    :keyword content: Pre-serialized request body.
    :return: An ``HttpRequest`` ready to be sent by the service client.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    api_version = "2016-12-01"
    accept = "application/json, text/json"
    # Construct URL
    url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
    path_format_arguments = {
        "scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
        "policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    # Content-Type is only sent when a body content type was supplied.
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="PUT",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        json=json,
        content=content,
        **kwargs
    )
def build_get_request(
    scope: str,
    policy_assignment_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request that retrieves a policy assignment at the given scope.

    Any remaining ``kwargs`` are forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    api_version = "2016-12-01"
    accept = "application/json, text/json"

    # Resolve the URL template, substituting serialized path parameters.
    _url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
    _path_args = {
        "scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
        "policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_for_resource_group_request(
    resource_group_name: str,
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing policy assignments for a resource group.

    ``filter`` is an optional OData ``$filter`` expression; remaining ``kwargs``
    are forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    api_version = "2016-12-01"
    accept = "application/json, text/json"

    # Resolve the URL template, substituting serialized path parameters.
    _url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments')
    _path_args = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string; $filter is passed through unquoted when provided.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    if filter is not None:
        _params['$filter'] = _SERIALIZER.query("filter", filter, 'str', skip_quote=True)
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_for_resource_request(
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing policy assignments for a specific resource.

    ``filter`` is an optional OData ``$filter`` expression; remaining ``kwargs``
    are forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    api_version = "2016-12-01"
    accept = "application/json, text/json"

    # Resolve the URL template, substituting serialized path parameters.
    # parentResourcePath and resourceType may themselves contain '/', hence skip_quote.
    _url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments')
    _path_args = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        "parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        "resourceType": _SERIALIZER.url("resource_type", resource_type, 'str', skip_quote=True),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    if filter is not None:
        _params['$filter'] = _SERIALIZER.query("filter", filter, 'str')
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_request(
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request listing all policy assignments in a subscription.

    ``filter`` is an optional OData ``$filter`` expression; remaining ``kwargs``
    are forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    api_version = "2016-12-01"
    accept = "application/json, text/json"

    # Resolve the URL template, substituting serialized path parameters.
    _url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments')
    _path_args = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    if filter is not None:
        _params['$filter'] = _SERIALIZER.query("filter", filter, 'str')
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_by_id_request(
    policy_assignment_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP DELETE request that removes a policy assignment by its full ID.

    Any remaining ``kwargs`` are forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    api_version = "2016-12-01"
    accept = "application/json, text/json"

    # The assignment ID is itself a full ARM path, so it is substituted unquoted.
    _url = kwargs.pop("template_url", '/{policyAssignmentId}')
    _path_args = {
        "policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_by_id_request(
    policy_assignment_id: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request that creates a policy assignment by its full ID.

    ``json``/``content`` carry the request body; any remaining ``kwargs`` are
    forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    api_version = "2016-12-01"
    accept = "application/json, text/json"

    # The assignment ID is itself a full ARM path, so it is substituted unquoted.
    _url = kwargs.pop("template_url", '/{policyAssignmentId}')
    _path_args = {
        "policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers; Content-Type is only sent when a body content type was supplied.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, json=json, content=content, **kwargs)
def build_get_by_id_request(
    policy_assignment_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request that retrieves a policy assignment by its full ID.

    Any remaining ``kwargs`` are forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    api_version = "2016-12-01"
    accept = "application/json, text/json"

    # The assignment ID is itself a full ARM path, so it is substituted unquoted.
    _url = kwargs.pop("template_url", '/{policyAssignmentId}')
    _path_args = {
        "policyAssignmentId": _SERIALIZER.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string.
    _params = kwargs.pop("params", {})  # type: Dict[str, Any]
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class PolicyAssignmentsOperations(object):
    """PolicyAssignmentsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.resource.policy.v2016_12_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Convenience alias so callers can reach the model classes via the operation group.
    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace
    def delete(
        self,
        scope: str,
        policy_assignment_name: str,
        **kwargs: Any
    ) -> Optional["_models.PolicyAssignment"]:
        """Deletes a policy assignment.

        :param scope: The scope of the policy assignment.
        :type scope: str
        :param policy_assignment_name: The name of the policy assignment to delete.
        :type policy_assignment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyAssignment, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment or None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.PolicyAssignment"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request(
            scope=scope,
            policy_assignment_name=policy_assignment_name,
            template_url=self.delete.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 carries the deleted assignment; 204 means there was nothing to delete.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('PolicyAssignment', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    delete.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'}  # type: ignore

    @distributed_trace
    def create(
        self,
        scope: str,
        policy_assignment_name: str,
        parameters: "_models.PolicyAssignment",
        **kwargs: Any
    ) -> "_models.PolicyAssignment":
        """Creates a policy assignment.

        Policy assignments are inherited by child resources. For example, when you apply a policy to a
        resource group that policy is assigned to all resources in the group.

        :param scope: The scope of the policy assignment.
        :type scope: str
        :param policy_assignment_name: The name of the policy assignment.
        :type policy_assignment_name: str
        :param parameters: Parameters for the policy assignment.
        :type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyAssignment, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        _json = self._serialize.body(parameters, 'PolicyAssignment')
        request = build_create_request(
            scope=scope,
            policy_assignment_name=policy_assignment_name,
            content_type=content_type,
            json=_json,
            template_url=self.create.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Only 201 Created is a success for this PUT in the 2016-12-01 API.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PolicyAssignment', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'}  # type: ignore

    @distributed_trace
    def get(
        self,
        scope: str,
        policy_assignment_name: str,
        **kwargs: Any
    ) -> "_models.PolicyAssignment":
        """Gets a policy assignment.

        :param scope: The scope of the policy assignment.
        :type scope: str
        :param policy_assignment_name: The name of the policy assignment to get.
        :type policy_assignment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyAssignment, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            scope=scope,
            policy_assignment_name=policy_assignment_name,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PolicyAssignment', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}'}  # type: ignore

    @distributed_trace
    def list_for_resource_group(
        self,
        resource_group_name: str,
        filter: Optional[str] = None,
        **kwargs: Any
    ) -> Iterable["_models.PolicyAssignmentListResult"]:
        """Gets policy assignments for the resource group.

        :param resource_group_name: The name of the resource group that contains policy assignments.
        :type resource_group_name: str
        :param filter: The filter to apply on the operation.
        :type filter: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PolicyAssignmentListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignmentListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Builds the first-page request, or a follow-up request from the service-supplied next_link.
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_for_resource_group_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    template_url=self.list_for_resource_group.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_for_resource_group_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # next_link pages are always fetched with GET regardless of the template.
                request.method = "GET"
            return request
        # Turns one page response into (next_link, iterator-of-items) for ItemPaged.
        def extract_data(pipeline_response):
            deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetches the next page and raises on any non-200 response.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_for_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments'}  # type: ignore

    @distributed_trace
    def list_for_resource(
        self,
        resource_group_name: str,
        resource_provider_namespace: str,
        parent_resource_path: str,
        resource_type: str,
        resource_name: str,
        filter: Optional[str] = None,
        **kwargs: Any
    ) -> Iterable["_models.PolicyAssignmentListResult"]:
        """Gets policy assignments for a resource.

        :param resource_group_name: The name of the resource group containing the resource. The name is
         case insensitive.
        :type resource_group_name: str
        :param resource_provider_namespace: The namespace of the resource provider.
        :type resource_provider_namespace: str
        :param parent_resource_path: The parent resource path.
        :type parent_resource_path: str
        :param resource_type: The resource type.
        :type resource_type: str
        :param resource_name: The name of the resource with policy assignments.
        :type resource_name: str
        :param filter: The filter to apply on the operation.
        :type filter: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PolicyAssignmentListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignmentListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Builds the first-page request, or a follow-up request from the service-supplied next_link.
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_for_resource_request(
                    resource_group_name=resource_group_name,
                    resource_provider_namespace=resource_provider_namespace,
                    parent_resource_path=parent_resource_path,
                    resource_type=resource_type,
                    resource_name=resource_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    template_url=self.list_for_resource.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_for_resource_request(
                    resource_group_name=resource_group_name,
                    resource_provider_namespace=resource_provider_namespace,
                    parent_resource_path=parent_resource_path,
                    resource_type=resource_type,
                    resource_name=resource_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # next_link pages are always fetched with GET regardless of the template.
                request.method = "GET"
            return request
        # Turns one page response into (next_link, iterator-of-items) for ItemPaged.
        def extract_data(pipeline_response):
            deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetches the next page and raises on any non-200 response.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_for_resource.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments'}  # type: ignore

    @distributed_trace
    def list(
        self,
        filter: Optional[str] = None,
        **kwargs: Any
    ) -> Iterable["_models.PolicyAssignmentListResult"]:
        """Gets all the policy assignments for a subscription.

        :param filter: The filter to apply on the operation.
        :type filter: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PolicyAssignmentListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignmentListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Builds the first-page request, or a follow-up request from the service-supplied next_link.
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # next_link pages are always fetched with GET regardless of the template.
                request.method = "GET"
            return request
        # Turns one page response into (next_link, iterator-of-items) for ItemPaged.
        def extract_data(pipeline_response):
            deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetches the next page and raises on any non-200 response.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyAssignments'}  # type: ignore

    @distributed_trace
    def delete_by_id(
        self,
        policy_assignment_id: str,
        **kwargs: Any
    ) -> "_models.PolicyAssignment":
        """Deletes a policy assignment by ID.

        When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
        subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
        resource groups, and
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
        for resources.

        :param policy_assignment_id: The ID of the policy assignment to delete. Use the format
         '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
        :type policy_assignment_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyAssignment, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_by_id_request(
            policy_assignment_id=policy_assignment_id,
            template_url=self.delete_by_id.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # NOTE: unlike delete(), this accepts only 200 — a 204 would raise here.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PolicyAssignment', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    delete_by_id.metadata = {'url': '/{policyAssignmentId}'}  # type: ignore

    @distributed_trace
    def create_by_id(
        self,
        policy_assignment_id: str,
        parameters: "_models.PolicyAssignment",
        **kwargs: Any
    ) -> "_models.PolicyAssignment":
        """Creates a policy assignment by ID.

        Policy assignments are inherited by child resources. For example, when you apply a policy to a
        resource group that policy is assigned to all resources in the group. When providing a scope
        for the assignment, use '/subscriptions/{subscription-id}/' for subscriptions,
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for resource groups,
        and
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
        for resources.

        :param policy_assignment_id: The ID of the policy assignment to create. Use the format
         '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
        :type policy_assignment_id: str
        :param parameters: Parameters for policy assignment.
        :type parameters: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyAssignment, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        _json = self._serialize.body(parameters, 'PolicyAssignment')
        request = build_create_by_id_request(
            policy_assignment_id=policy_assignment_id,
            content_type=content_type,
            json=_json,
            template_url=self.create_by_id.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Only 201 Created is a success for this PUT in the 2016-12-01 API.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PolicyAssignment', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_by_id.metadata = {'url': '/{policyAssignmentId}'}  # type: ignore

    @distributed_trace
    def get_by_id(
        self,
        policy_assignment_id: str,
        **kwargs: Any
    ) -> "_models.PolicyAssignment":
        """Gets a policy assignment by ID.

        When providing a scope for the assignment, use '/subscriptions/{subscription-id}/' for
        subscriptions, '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for
        resource groups, and
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
        for resources.

        :param policy_assignment_id: The ID of the policy assignment to get. Use the format
         '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
        :type policy_assignment_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyAssignment, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PolicyAssignment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_by_id_request(
            policy_assignment_id=policy_assignment_id,
            template_url=self.get_by_id.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PolicyAssignment', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_by_id.metadata = {'url': '/{policyAssignmentId}'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2016_12_01/operations/_policy_assignments_operations.py | Python | mit | 38,695 |