import React from 'react'
import { Tablist, Tab } from 'evergreen-ui'
import SyntaxHighlighter, { registerLanguage } from 'react-syntax-highlighter/dist/light'
import js from 'react-syntax-highlighter/dist/languages/javascript'
import syntaxStyle from './syntaxStyle'
import styles from './App.css'
registerLanguage('javascript', js)
const tabs = ['Nightmare', 'Puppeteer']
const App = ({ onSelectTab, selectedTab, onRestart, recording }) => {
let script = ''
if (selectedTab === 'Nightmare') {
script = getNightmare(recording)
} else if (selectedTab === 'Puppeteer') {
script = getPuppeteer(recording)
}
return (
<div>
<Tablist marginX={-4} marginBottom={16} textAlign='center'>
{tabs.map((tab, index) => (
<Tab
key={tab}
id={tab}
isSelected={tab === selectedTab}
onSelect={() => onSelectTab(tab)}
aria-controls={`panel-${tab}`}
>
{tab}
</Tab>
))}
</Tablist>
<SyntaxHighlighter language='javascript' style={syntaxStyle}>
{script}
</SyntaxHighlighter>
<button className={styles.button} onClick={onRestart}>Restart</button>
</div>
)
}
function getNightmare (recording) {
return `const Nightmare = require('nightmare')
const nightmare = Nightmare({ show: true })
nightmare
${recording.reduce((records, record, i) => {
const { action, url, selector, value } = record
let result = records
if (i !== 0) result += '\n'
switch (action) {
case 'keydown':
result += `.type('${selector}', '${value}')`
break
case 'click':
result += `.click('${selector}')`
break
case 'goto':
result += `.goto('${url}')`
break
case 'reload':
result += `.refresh()`
break
}
return result
}, '')}
.end()
.then(function (result) {
console.log(result)
})
.catch(function (error) {
console.error('Error:', error);
});`
}
function getPuppeteer (recording) {
return `const puppeteer = require('puppeteer');
(async () => {
const browser = await puppeteer.launch()
const page = await browser.newPage()
${recording.reduce((records, record, i) => {
const { action, url, selector, value } = record
let result = records
if (i !== 0) result += '\n'
switch (action) {
case 'keydown':
result += ` await page.type('${selector}', '${value}')`
break
case 'click':
result += ` await page.click('${selector}')`
break
case 'goto':
result += ` await page.goto('${url}')`
break
case 'reload':
result += ` await page.reload()`
break
}
return result
}, '')}
await browser.close()
})()`
}
export default App
|
<?php
namespace App\Containers\Applies\Data\Repositories;
use App\Ship\Parents\Repositories\Repository;
/**
* Class AppliesRepository
*/
class AppliesRepository extends Repository
{
/**
* @var array
*/
protected $fieldSearchable = [
'id' => '=',
'company_id' => '=',
'seeker_id' => '=',
'jobpost_id' => '='
// ...
];
}
|
#include <iostream>
#include <vector>
#include <algorithm>
using namespace std;
class Solution {
public:
bool searchMatrix(vector<vector<int> >& matrix, int target) {
if (matrix.empty() || matrix.front().empty())
return false;
// binary search for the last row whose first element is <= target,
// then binary search for the target inside that row
vector<vector<int> >::size_type left = 0;
vector<vector<int> >::size_type right = matrix.size()-1;
while (left < right) {
// upper midpoint so the loop always makes progress
vector<vector<int> >::size_type mid = left + (right-left+1)/2;
if (matrix[mid][0] == target)
return true;
else if (matrix[mid][0] > target)
right = mid-1;
else
left = mid;
}
return binary_search(matrix[left].begin(), matrix[left].end(), target);
}
};
int main(void)
{
return 0;
}
|
class Lista
attr_reader :cabeza, :cola
include Enumerable
def initialize(nodes)
for i in 0..nodes.length-1 do
nodes[i]=Node.new(nodes[i],nil,nil)
end
j = nodes.length
j -= 1
@cola = nodes[0]
@cabeza = nodes[j]
if j > 1
@cola.next = nodes[1]
for i in 1..j-1 do
nodes[i].next = nodes[i+1]
nodes[i].prev = nodes[i-1]
end
@cabeza.prev = nodes[j-1]
else
if j == 1
@cabeza.prev = @cola
@cola.next = @cabeza
else
@cabeza = @cola
end
end
end
def pop_back
# detach the back node (@cabeza) and return its value
raise if @cabeza == @cola
backup = @cabeza.value
@cabeza = @cabeza.prev
@cabeza.next = nil
backup
end
def pop_front
# detach the front node (@cola) and return its value
raise if @cabeza == @cola
backup = @cola.value
@cola = @cola.next
@cola.prev = nil
backup
end
def push_front(node)
node = Node.new(node,nil,nil)
@cola.prev = node
node.next = @cola
@cola = node
end
def push_back(node)
node = Node.new(node,nil,nil)
@cabeza.next = node
node.prev = @cabeza
@cabeza = node
end
def push_much_back(nodes)
raise unless nodes.is_a? Array
for i in 0..nodes.length-1 do
nodes[i]=Node.new(nodes[i],nil,nil)
end
j = nodes.length
j -= 1
for i in 0..j do
@cabeza.next = nodes[i]
nodes[i].prev = @cabeza
@cabeza = nodes[i]
end
end
def push_much_front(nodes)
raise unless nodes.is_a? Array
for i in 0..nodes.length-1 do
nodes[i]=Node.new(nodes[i],nil,nil)
end
j = nodes.length
j -= 1
for i in 0..j do
@cola.prev = nodes[i]
nodes[i].next = @cola
@cola = nodes[i]
end
end
def pop_much_front(number)
raise unless number.is_a? Integer
array = []
for i in 0..number-1 do
array[i] = @cola
@cola = cola.next
@cola.prev = nil
end
array
end
def pop_much_back(number)
raise unless number.is_a? Integer
array = []
for i in 0..number-1 do
array[i] = @cabeza
@cabeza = cabeza.prev
@cabeza.next = nil
end
array
end
def toHead
actual = @cola
while actual.next != nil do
actual = actual.next
end
actual
end
def toTail
actual = @cabeza
while actual.prev != nil do
actual = actual.prev
end
actual
end
def each(&block)
actual = @cola
while actual != nil do
yield actual
actual = actual.next
end
end
def to_s
y=""
self.each{ |x| y+=x.value.to_s }
y
end
end
|
#!/usr/bin/env bash
defaults write otest CKFTPTestURL MockServer
defaults write otest CKWebDAVTestURL MockServer
defaults write otest CKSFTPTestURL MockServer
|
export class CD{
constructor(nome,artista,faixas,anoLancamento){
this._nome=nome;
this._artista=artista;
this._anoLancamento=anoLancamento;
this._faixas=faixas;
this._ouvidoPorInteiro=false;
this._faixasOuvidas=0;
}
get nome(){
return this._nome;
}
get artista(){
return this._artista;
}
get faixas(){
return this._faixas;
}
get dataLancamento(){
return this._anoLancamento;
}
tocado(faixasOuvidas){
// records how many tracks were played and whether the whole CD was heard
this._faixasOuvidas=faixasOuvidas;
this._ouvidoPorInteiro=(faixasOuvidas===this._faixas);
}
}
|
// Copyright (C) MongoDB, Inc. 2014-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
package mongoimport
import (
"io"
"testing"
"github.com/mongodb/mongo-tools-common/log"
"github.com/mongodb/mongo-tools-common/options"
"github.com/mongodb/mongo-tools-common/testtype"
. "github.com/smartystreets/goconvey/convey"
"go.mongodb.org/mongo-driver/bson"
"gopkg.in/tomb.v2"
)
func init() {
log.SetVerbosity(&options.Verbosity{
VLevel: 4,
})
}
var (
index = uint64(0)
csvConverters = []CSVConverter{
{
colSpecs: []ColumnSpec{
{"field1", new(FieldAutoParser), pgAutoCast, "auto"},
{"field2", new(FieldAutoParser), pgAutoCast, "auto"},
{"field3", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"a", "b", "c"},
index: index,
},
{
colSpecs: []ColumnSpec{
{"field4", new(FieldAutoParser), pgAutoCast, "auto"},
{"field5", new(FieldAutoParser), pgAutoCast, "auto"},
{"field6", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"d", "e", "f"},
index: index,
},
{
colSpecs: []ColumnSpec{
{"field7", new(FieldAutoParser), pgAutoCast, "auto"},
{"field8", new(FieldAutoParser), pgAutoCast, "auto"},
{"field9", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"d", "e", "f"},
index: index,
},
{
colSpecs: []ColumnSpec{
{"field10", new(FieldAutoParser), pgAutoCast, "auto"},
{"field11", new(FieldAutoParser), pgAutoCast, "auto"},
{"field12", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"d", "e", "f"},
index: index,
},
{
colSpecs: []ColumnSpec{
{"field13", new(FieldAutoParser), pgAutoCast, "auto"},
{"field14", new(FieldAutoParser), pgAutoCast, "auto"},
{"field15", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"d", "e", "f"},
index: index,
},
}
expectedDocuments = []bson.D{
{
{"field1", "a"},
{"field2", "b"},
{"field3", "c"},
}, {
{"field4", "d"},
{"field5", "e"},
{"field6", "f"},
}, {
{"field7", "d"},
{"field8", "e"},
{"field9", "f"},
}, {
{"field10", "d"},
{"field11", "e"},
{"field12", "f"},
}, {
{"field13", "d"},
{"field14", "e"},
{"field15", "f"},
},
}
)
func convertBSONDToRaw(documents []bson.D) []bson.Raw {
rawBSONDocuments := []bson.Raw{}
for _, document := range documents {
rawBytes, err := bson.Marshal(document)
So(err, ShouldBeNil)
rawBSONDocuments = append(rawBSONDocuments, rawBytes)
}
return rawBSONDocuments
}
func TestValidateFields(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given an import input, in validating the headers", t, func() {
Convey("if the fields contain '..', an error should be thrown", func() {
So(validateFields([]string{"a..a"}), ShouldNotBeNil)
})
Convey("if the fields start/end in a '.', an error should be thrown", func() {
So(validateFields([]string{".a"}), ShouldNotBeNil)
So(validateFields([]string{"a."}), ShouldNotBeNil)
})
Convey("if the fields start in a '$', an error should be thrown", func() {
So(validateFields([]string{"$.a"}), ShouldNotBeNil)
So(validateFields([]string{"$"}), ShouldNotBeNil)
So(validateFields([]string{"$a"}), ShouldNotBeNil)
So(validateFields([]string{"a$a"}), ShouldBeNil)
})
Convey("if the fields collide, an error should be thrown", func() {
So(validateFields([]string{"a", "a.a"}), ShouldNotBeNil)
So(validateFields([]string{"a", "a.ba", "b.a"}), ShouldNotBeNil)
So(validateFields([]string{"a", "a.ba", "b.a"}), ShouldNotBeNil)
So(validateFields([]string{"a", "a.b.c"}), ShouldNotBeNil)
})
Convey("if the fields don't collide, no error should be thrown", func() {
So(validateFields([]string{"a", "aa"}), ShouldBeNil)
So(validateFields([]string{"a", "aa", "b.a", "b.c"}), ShouldBeNil)
So(validateFields([]string{"a", "ba", "ab", "b.a"}), ShouldBeNil)
So(validateFields([]string{"a", "ba", "ab", "b.a", "b.c.d"}), ShouldBeNil)
So(validateFields([]string{"a", "ab.c"}), ShouldBeNil)
})
Convey("if the fields contain the same keys, an error should be thrown", func() {
So(validateFields([]string{"a", "ba", "a"}), ShouldNotBeNil)
})
})
}
func TestGetUpsertValue(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given a field and a BSON document, on calling getUpsertValue", t, func() {
Convey("the value of the key should be correct for unnested documents", func() {
bsonDocument := bson.D{{"a", 3}}
So(getUpsertValue("a", bsonDocument), ShouldEqual, 3)
})
Convey("the value of the key should be correct for nested document fields", func() {
inner := bson.D{{"b", 4}}
bsonDocument := bson.D{{"a", inner}}
So(getUpsertValue("a.b", bsonDocument), ShouldEqual, 4)
})
Convey("the value of the key should be correct for nested document pointer fields", func() {
inner := bson.D{{"b", 4}}
bsonDocument := bson.D{{"a", &inner}}
So(getUpsertValue("a.b", bsonDocument), ShouldEqual, 4)
})
Convey("the value of the key should be nil for unnested document "+
"fields that do not exist", func() {
bsonDocument := bson.D{{"a", 4}}
So(getUpsertValue("c", bsonDocument), ShouldBeNil)
})
Convey("the value of the key should be nil for nested document "+
"fields that do not exist", func() {
inner := bson.D{{"b", 4}}
bsonDocument := bson.D{{"a", inner}}
So(getUpsertValue("a.c", bsonDocument), ShouldBeNil)
})
Convey("the value of the key should be nil for nested document pointer "+
"fields that do not exist", func() {
inner := bson.D{{"b", 4}}
bsonDocument := bson.D{{"a", &inner}}
So(getUpsertValue("a.c", bsonDocument), ShouldBeNil)
})
Convey("the value of the key should be nil for nil document values", func() {
So(getUpsertValue("a", bson.D{{"a", nil}}), ShouldBeNil)
})
})
}
func TestConstructUpsertDocument(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given a set of upsert fields and a BSON document, on calling "+
"constructUpsertDocument", t, func() {
Convey("the key/value combination in the upsert document should be "+
"correct for unnested documents with single fields", func() {
bsonDocument := bson.D{{"a", 3}}
upsertFields := []string{"a"}
upsertDocument := constructUpsertDocument(upsertFields,
bsonDocument)
So(upsertDocument, ShouldResemble, bsonDocument)
})
Convey("the key/value combination in the upsert document should be "+
"correct for unnested documents with several fields", func() {
bsonDocument := bson.D{{"a", 3}, {"b", "string value"}}
upsertFields := []string{"a"}
expectedDocument := bson.D{{"a", 3}}
upsertDocument := constructUpsertDocument(upsertFields,
bsonDocument)
So(upsertDocument, ShouldResemble, expectedDocument)
})
Convey("the key/value combination in the upsert document should be "+
"correct for nested documents with several fields", func() {
inner := bson.D{{testCollection, 4}}
bsonDocument := bson.D{{"a", inner}, {"b", "string value"}}
upsertFields := []string{"a.c"}
expectedDocument := bson.D{{"a.c", 4}}
upsertDocument := constructUpsertDocument(upsertFields,
bsonDocument)
So(upsertDocument, ShouldResemble, expectedDocument)
})
Convey("the upsert document should be nil if the key does not exist "+
"in the BSON document", func() {
bsonDocument := bson.D{{"a", 3}, {"b", "string value"}}
upsertFields := []string{testCollection}
upsertDocument := constructUpsertDocument(upsertFields, bsonDocument)
So(upsertDocument, ShouldBeNil)
})
})
}
func TestSetNestedValue(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given a field, its value, and an existing BSON document...", t, func() {
b := bson.D{{"c", "d"}}
currentDocument := bson.D{
{"a", 3},
{"b", &b},
}
Convey("ensure top level fields are set and others, unchanged", func() {
testDocument := &currentDocument
expectedDocument := bson.E{"c", 4}
setNestedValue("c", 4, testDocument)
newDocument := *testDocument
So(len(newDocument), ShouldEqual, 3)
So(newDocument[2], ShouldResemble, expectedDocument)
})
Convey("ensure new nested top-level fields are set and others, unchanged", func() {
testDocument := &currentDocument
expectedDocument := bson.D{{"b", "4"}}
setNestedValue("c.b", "4", testDocument)
newDocument := *testDocument
So(len(newDocument), ShouldEqual, 3)
So(newDocument[2].Key, ShouldResemble, "c")
So(*newDocument[2].Value.(*bson.D), ShouldResemble, expectedDocument)
})
Convey("ensure existing nested level fields are set and others, unchanged", func() {
testDocument := &currentDocument
expectedDocument := bson.D{{"c", "d"}, {"d", 9}}
setNestedValue("b.d", 9, testDocument)
newDocument := *testDocument
So(len(newDocument), ShouldEqual, 2)
So(newDocument[1].Key, ShouldResemble, "b")
So(*newDocument[1].Value.(*bson.D), ShouldResemble, expectedDocument)
})
Convey("ensure subsequent calls update fields accordingly", func() {
testDocument := &currentDocument
expectedDocumentOne := bson.D{{"c", "d"}, {"d", 9}}
expectedDocumentTwo := bson.E{"f", 23}
setNestedValue("b.d", 9, testDocument)
newDocument := *testDocument
So(len(newDocument), ShouldEqual, 2)
So(newDocument[1].Key, ShouldResemble, "b")
So(*newDocument[1].Value.(*bson.D), ShouldResemble, expectedDocumentOne)
setNestedValue("f", 23, testDocument)
newDocument = *testDocument
So(len(newDocument), ShouldEqual, 3)
So(newDocument[2], ShouldResemble, expectedDocumentTwo)
})
})
}
func TestRemoveBlankFields(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given an unordered BSON document", t, func() {
Convey("the same document should be returned if there are no blanks", func() {
bsonDocument := bson.D{{"a", 3}, {"b", "hello"}}
So(removeBlankFields(bsonDocument), ShouldResemble, bsonDocument)
})
Convey("a new document without blanks should be returned if there are "+
" blanks", func() {
d := bson.D{
{"a", ""},
{"b", ""},
}
e := bson.D{
{"a", ""},
{"b", 1},
}
bsonDocument := bson.D{
{"a", 0},
{"b", ""},
{"c", ""},
{"d", &d},
{"e", &e},
}
inner := bson.D{
{"b", 1},
}
expectedDocument := bson.D{
{"a", 0},
{"e", inner},
}
So(removeBlankFields(bsonDocument), ShouldResemble, expectedDocument)
})
})
}
func TestTokensToBSON(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given an slice of column specs and tokens to convert to BSON", t, func() {
Convey("the expected ordered BSON should be produced for the given"+
"column specs and tokens", func() {
colSpecs := []ColumnSpec{
{"a", new(FieldAutoParser), pgAutoCast, "auto"},
{"b", new(FieldAutoParser), pgAutoCast, "auto"},
{"c", new(FieldAutoParser), pgAutoCast, "auto"},
}
tokens := []string{"1", "2", "hello"}
expectedDocument := bson.D{
{"a", int32(1)},
{"b", int32(2)},
{"c", "hello"},
}
bsonD, err := tokensToBSON(colSpecs, tokens, uint64(0), false)
So(err, ShouldBeNil)
So(bsonD, ShouldResemble, expectedDocument)
})
Convey("if there are more tokens than fields, additional fields should be prefixed"+
" with 'fields' and an index indicating the header number", func() {
colSpecs := []ColumnSpec{
{"a", new(FieldAutoParser), pgAutoCast, "auto"},
{"b", new(FieldAutoParser), pgAutoCast, "auto"},
{"c", new(FieldAutoParser), pgAutoCast, "auto"},
}
tokens := []string{"1", "2", "hello", "mongodb", "user"}
expectedDocument := bson.D{
{"a", int32(1)},
{"b", int32(2)},
{"c", "hello"},
{"field3", "mongodb"},
{"field4", "user"},
}
bsonD, err := tokensToBSON(colSpecs, tokens, uint64(0), false)
So(err, ShouldBeNil)
So(bsonD, ShouldResemble, expectedDocument)
})
Convey("an error should be thrown if duplicate headers are found", func() {
colSpecs := []ColumnSpec{
{"a", new(FieldAutoParser), pgAutoCast, "auto"},
{"b", new(FieldAutoParser), pgAutoCast, "auto"},
{"field3", new(FieldAutoParser), pgAutoCast, "auto"},
}
tokens := []string{"1", "2", "hello", "mongodb", "user"}
_, err := tokensToBSON(colSpecs, tokens, uint64(0), false)
So(err, ShouldNotBeNil)
})
Convey("fields with nested values should be set appropriately", func() {
colSpecs := []ColumnSpec{
{"a", new(FieldAutoParser), pgAutoCast, "auto"},
{"b", new(FieldAutoParser), pgAutoCast, "auto"},
{"c.a", new(FieldAutoParser), pgAutoCast, "auto"},
}
tokens := []string{"1", "2", "hello"}
c := bson.D{
{"a", "hello"},
}
expectedDocument := bson.D{
{"a", int32(1)},
{"b", int32(2)},
{"c", c},
}
bsonD, err := tokensToBSON(colSpecs, tokens, uint64(0), false)
So(err, ShouldBeNil)
So(expectedDocument[0].Key, ShouldResemble, bsonD[0].Key)
So(expectedDocument[0].Value, ShouldResemble, bsonD[0].Value)
So(expectedDocument[1].Key, ShouldResemble, bsonD[1].Key)
So(expectedDocument[1].Value, ShouldResemble, bsonD[1].Value)
So(expectedDocument[2].Key, ShouldResemble, bsonD[2].Key)
So(expectedDocument[2].Value, ShouldResemble, *bsonD[2].Value.(*bson.D))
})
})
}
func TestProcessDocuments(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given an import worker", t, func() {
index := uint64(0)
csvConverters := []CSVConverter{
{
colSpecs: []ColumnSpec{
{"field1", new(FieldAutoParser), pgAutoCast, "auto"},
{"field2", new(FieldAutoParser), pgAutoCast, "auto"},
{"field3", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"a", "b", "c"},
index: index,
},
{
colSpecs: []ColumnSpec{
{"field4", new(FieldAutoParser), pgAutoCast, "auto"},
{"field5", new(FieldAutoParser), pgAutoCast, "auto"},
{"field6", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"d", "e", "f"},
index: index,
},
}
expectedDocuments := []bson.D{
{
{"field1", "a"},
{"field2", "b"},
{"field3", "c"},
}, {
{"field4", "d"},
{"field5", "e"},
{"field6", "f"},
},
}
Convey("processDocuments should execute the expected conversion for documents, "+
"pass then on the output channel, and close the input channel if ordered is true", func() {
inputChannel := make(chan Converter, 100)
outputChannel := make(chan bson.D, 100)
iw := &importWorker{
unprocessedDataChan: inputChannel,
processedDocumentChan: outputChannel,
tomb: &tomb.Tomb{},
}
inputChannel <- csvConverters[0]
inputChannel <- csvConverters[1]
close(inputChannel)
So(iw.processDocuments(true), ShouldBeNil)
doc1, open := <-outputChannel
So(doc1, ShouldResemble, expectedDocuments[0])
So(open, ShouldEqual, true)
doc2, open := <-outputChannel
So(doc2, ShouldResemble, expectedDocuments[1])
So(open, ShouldEqual, true)
_, open = <-outputChannel
So(open, ShouldEqual, false)
})
Convey("processDocuments should execute the expected conversion for documents, "+
"pass then on the output channel, and leave the input channel open if ordered is false", func() {
inputChannel := make(chan Converter, 100)
outputChannel := make(chan bson.D, 100)
iw := &importWorker{
unprocessedDataChan: inputChannel,
processedDocumentChan: outputChannel,
tomb: &tomb.Tomb{},
}
inputChannel <- csvConverters[0]
inputChannel <- csvConverters[1]
close(inputChannel)
So(iw.processDocuments(false), ShouldBeNil)
doc1, open := <-outputChannel
So(doc1, ShouldResemble, expectedDocuments[0])
So(open, ShouldEqual, true)
doc2, open := <-outputChannel
So(doc2, ShouldResemble, expectedDocuments[1])
So(open, ShouldEqual, true)
// close will throw a runtime error if outputChannel is already closed
close(outputChannel)
})
})
}
func TestDoSequentialStreaming(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given some import workers, a Converters input channel and an bson.D output channel", t, func() {
inputChannel := make(chan Converter, 5)
outputChannel := make(chan bson.D, 5)
workerInputChannel := []chan Converter{
make(chan Converter),
make(chan Converter),
}
workerOutputChannel := []chan bson.D{
make(chan bson.D),
make(chan bson.D),
}
importWorkers := []*importWorker{
{
unprocessedDataChan: workerInputChannel[0],
processedDocumentChan: workerOutputChannel[0],
tomb: &tomb.Tomb{},
},
{
unprocessedDataChan: workerInputChannel[1],
processedDocumentChan: workerOutputChannel[1],
tomb: &tomb.Tomb{},
},
}
Convey("documents moving through the input channel should be processed and returned in sequence", func() {
// start goroutines to do sequential processing
for _, iw := range importWorkers {
go iw.processDocuments(true)
}
// feed in a bunch of documents
for _, inputCSVDocument := range csvConverters {
inputChannel <- inputCSVDocument
}
close(inputChannel)
doSequentialStreaming(importWorkers, inputChannel, outputChannel)
for _, document := range expectedDocuments {
So(<-outputChannel, ShouldResemble, document)
}
})
})
}
func TestStreamDocuments(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey(`Given:
1. a boolean indicating streaming order
2. an input channel where documents are streamed in
3. an output channel where processed documents are streamed out`, t, func() {
inputChannel := make(chan Converter, 5)
outputChannel := make(chan bson.D, 5)
Convey("the entire pipeline should complete without error under normal circumstances", func() {
// stream in some documents
for _, csvConverter := range csvConverters {
inputChannel <- csvConverter
}
close(inputChannel)
So(streamDocuments(true, 3, inputChannel, outputChannel), ShouldBeNil)
// ensure documents are streamed out and processed in the correct manner
for _, expectedDocument := range expectedDocuments {
So(<-outputChannel, ShouldResemble, expectedDocument)
}
})
Convey("the entire pipeline should complete with error if an error is encountered", func() {
// stream in some documents - create duplicate headers to simulate an error
csvConverter := CSVConverter{
colSpecs: []ColumnSpec{
{"field1", new(FieldAutoParser), pgAutoCast, "auto"},
{"field2", new(FieldAutoParser), pgAutoCast, "auto"},
},
data: []string{"a", "b", "c"},
index: uint64(0),
}
inputChannel <- csvConverter
close(inputChannel)
// ensure that an error is returned on the error channel
So(streamDocuments(true, 3, inputChannel, outputChannel), ShouldNotBeNil)
})
})
}
func TestChannelQuorumError(t *testing.T) {
testtype.SkipUnlessTestType(t, testtype.UnitTestType)
Convey("Given a channel and a quorum...", t, func() {
Convey("an error should be returned if one is received", func() {
ch := make(chan error, 2)
ch <- nil
ch <- io.EOF
So(channelQuorumError(ch, 2), ShouldNotBeNil)
})
Convey("no error should be returned if none is received", func() {
ch := make(chan error, 2)
ch <- nil
ch <- nil
So(channelQuorumError(ch, 2), ShouldBeNil)
})
Convey("no error should be returned if up to quorum nil errors are received", func() {
ch := make(chan error, 3)
ch <- nil
ch <- nil
ch <- io.EOF
So(channelQuorumError(ch, 2), ShouldBeNil)
})
})
}
|
// tslint:disable:no-bitwise
export interface IPersonaProfile {
id: string;
displayName?: string;
jobTitle?: string;
department?: string;
email?: string;
businessPhone?: string;
imAddress?: string;
officeLocation?: string;
city?: string;
companyName?: string;
}
export enum ShowModeFlags {
Name = 1,
Title = 2,
Department = 4
}
export enum PersonaShowMode {
NameOnly = ShowModeFlags.Name,
NameTitle = ShowModeFlags.Name | ShowModeFlags.Title,
NameTitleDepartment = ShowModeFlags.Name | ShowModeFlags.Title | ShowModeFlags.Department
}
export type ResolveFunc<T> = (value?: T | PromiseLike<T> | undefined) => void;
// tslint:disable-next-line:no-any
export type RejectFunc = (reason?: any) => void;
|
<?php
namespace App\Http\Controllers;
use App\Models\todolist;
use Illuminate\Http\Request;
use Illuminate\Validation\Validator;
class TodolistController extends Controller
{
public function showAllData(){
return view('fetchedData')->with('todoArray', todolist::paginate(10));
}
public function delete($id){
todolist::destroy($id);
return redirect('/');
}
public function createView(){
return view('create');
}
public function createTodo(Request $request){
$request->validate([
'task' => 'required|max:255',
'status' => 'required',
]);
$todo = new todolist();
$todo->todo = $request->input('task');
$todo->status = $request->input('status');
$todo->save();
return redirect('/');
}
public function editView($id){
return view('edit')->with('todo', todolist::find($id));
}
public function updateTodo($id, Request $request){
$request->validate([
'task' => 'required|max:255',
'status' => 'required',
]);
$todo = todolist::find($id);
if ($todo->status === 2){
return redirect('edit/'.$id)->withErrors(array("status" => "The task has been completed already."));
}
$todo->todo = $request->input('task');
$todo->status = $request->input('status');
$todo->save();
return redirect('/');
}
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace MemeticApplication.MemeticLibrary.Heuristics
{
public enum AvailableHeuristics
{
SIMULATED_ANNEALING = 0 // default heuristics
}
}
|
---
title: 'API and Data Security'
---
## Typical use cases for MD5 and SHA1
## How to implement API idempotency
## How symmetric and asymmetric encryption algorithms work
## Measures for securing communication: request tamper-proofing, HTTPS, IP verification, client-side validation (see the signing sketch below)
## Web API security measures: authentication and authorization, encryption and signing, rate limiting, vulnerabilities, injection attacks, sensitive data exposure, unauthorized access
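As a minimal illustration of the "request tamper-proofing" and "signing" items above (a sketch only, using Node.js built-ins; the function names, signed-string layout, and 5-minute clock-skew window are assumptions rather than a prescribed scheme):

```ts
import { createHmac, timingSafeEqual } from 'crypto';

// Client side: sign method + path + timestamp + raw body with a shared secret.
function signRequest(secret: string, method: string, path: string, timestamp: string, body: string): string {
  const payload = [method.toUpperCase(), path, timestamp, body].join('\n');
  return createHmac('sha256', secret).update(payload).digest('hex');
}

// Server side: recompute the signature, compare in constant time, and reject
// stale timestamps as a basic replay guard.
function verifyRequest(secret: string, method: string, path: string, timestamp: string,
                       body: string, signature: string, maxSkewMs = 5 * 60 * 1000): boolean {
  if (Math.abs(Date.now() - Number(timestamp)) > maxSkewMs) return false;
  const expected = Buffer.from(signRequest(secret, method, path, timestamp, body), 'hex');
  const received = Buffer.from(signature, 'hex');
  return expected.length === received.length && timingSafeEqual(expected, received);
}
```

Pairing such a signature with a client-generated idempotency key stored server-side per request is one common way to cover the idempotency heading as well.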
|
main :: IO ()
main = do contents <- readFile filename
          case drop 6 $ lines contents of
            []  -> error "File has less than seven lines"
            l:_ -> putStrLn l
  where filename = "testfile"
|
Data snapshot: January 3, 2021, 11:00
Status: 200
1. Yu Zheng
Weibo heat index: 4778736
2. Young man turned away at his wedding pickup because the underwear he bought didn't fit
Weibo heat index: 3480205
3. Wang Leehom says Wang Yibo is a remarkable kid
Weibo heat index: 2416361
4. Yi Yangqianxi shaved his head for both of his films
Weibo heat index: 1663000
5. Xia Donghai's sons all like wearing head covers
Weibo heat index: 1567742
6. COVID-19 vaccination guidelines
Weibo heat index: 1466564
7. Hebei COVID-19 outbreak
Weibo heat index: 1118986
8. Aier Eye Hospital
Weibo heat index: 1091044
9. Hundreds attend Trump's New Year's Eve party without masks
Weibo heat index: 1085878
10. Guangdong finds B.1.1.7 variant in a case imported from the UK
Weibo heat index: 1084700
11. Li Meng posts a statement
Weibo heat index: 1084669
12. Xiao Zhan's New Year's Eve vlog
Weibo heat index: 1083158
13. Wang Junkai's studio reports he is safe
Weibo heat index: 949077
14. Hebei reports 1 new locally transmitted confirmed case
Weibo heat index: 934765
15. Bitcoin breaks through 32,000 US dollars
Weibo heat index: 933614
16. The real highlights of the New Year's Eve galas
Weibo heat index: 933339
17. Wang Yanlin and Li Yitong's kiss that came to nothing
Weibo heat index: 905265
18. When I took a day off from school
Weibo heat index: 690904
19. John Wall hustles so hard
Weibo heat index: 678265
20. Facial recognition mismatch lands a man in jail for 10 days
Weibo heat index: 673358
21. The failed audition Zhang Songwen portrayed was based on Bao Bei'er
Weibo heat index: 636243
22. Dylan Kuo: I'm like a blank sheet of paper
Weibo heat index: 595574
23. NIO responds to a wave of cancelled orders on its official website
Weibo heat index: 527467
24. Super-easy little desserts
Weibo heat index: 522674
25. Agent denies Hannah Quinlivan is pregnant with a third child
Weibo heat index: 514436
26. Eating hot pot in the snow
Weibo heat index: 507850
27. Ren Zhengfei asks Huawei to scale back its enterprise business
Weibo heat index: 500267
28. Carrie Lam looks gratified when discussing the Hong Kong national security law
Weibo heat index: 496397
29. James Harden sidelined by injury
Weibo heat index: 491033
30. Harbin comic-con organizers respond to indecent photo incident
Weibo heat index: 485181
31. The "utter nonsense" segment on Qi Pa Shuo
Weibo heat index: 424901
32. US host Larry King infected with COVID-19
Weibo heat index: 418150
33. Last day of the New Year holiday
Weibo heat index: 382830
34. Ericsson says it will leave Sweden if the Huawei ban continues
Weibo heat index: 346718
35. Balala the Fairies cast members mourn Sun Qiaolu
Weibo heat index: 344531
36. 2021 contains the inspiring number 21
Weibo heat index: 313351
37. One sentence to shut down nitpicking trolls
Weibo heat index: 300906
38. Xiao Zhan studio's kitschy banner
Weibo heat index: 294678
39. Police academy cadet turns into Spider-Man during counter-terrorism training in the snow
Weibo heat index: 294257
40. Slower 4G speeds spark discontent in South Korea
Weibo heat index: 286492
41. How fast Zheng Shuang climbs a wall
Weibo heat index: 253322
42. What problems come with using urban underground space
Weibo heat index: 253280
43. Beijing Time is actually produced in Xi'an
Weibo heat index: 253256
44. A playing card thrown from 5 meters away can slice open a can
Weibo heat index: 253251
45. The "wild man" turned out to be me
Weibo heat index: 253250
46. Woman fined 200 yuan for seat-grabbing on a high-speed train
Weibo heat index: 249475
47. If you don't love them that much, would you bolt on your wedding day?
Weibo heat index: 247922
48. Beijing Olympics weightlifting champion responds to doping incident
Weibo heat index: 243260
49. Xi Mengyao successfully meets her idol
Weibo heat index: 242511
50. Liu Jin Sui Yue (My Best Friend's Story)
Weibo heat index: 240972
|
/**
* ap - Abstract product
* Declares an interface for a type of product object.
*/
package com.gof.creational.abstract_factory.ap;
|
package ru.proshik.english.quizlet.telegramBot.queue
import org.apache.log4j.Logger
import org.springframework.stereotype.Component
import java.util.concurrent.LinkedBlockingDeque
@Component
class NotificationQueue : Queue {
companion object {
private val LOG = Logger.getLogger(NotificationQueue::class.java)
}
private val blockingQueue = LinkedBlockingDeque<Queue.Message>()
override fun put(message: Queue.Message) {
blockingQueue.add(message)
}
override fun take(): Queue.Message {
return blockingQueue.take()
}
}
|
<?php
/*
* File:
* src/Templates/Admin/Dashboard/index.ctp
* Description:
* Administration - Dashboard
* Layout element:
* --
*/
// Page title
$this->assign('title', __d('elabs', 'Dashboard'));
// Breadcrumbs
$this->Html->addCrumb($this->fetch('title'));
echo $this->element('layout/dev_block');
|
//
// BXCommentView.h
// BXlive
//
// Created by bxlive on 2019/5/6.
// Copyright © 2019 cat. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BXHMovieModel.h"
#define CommentViewTag 10099
@protocol DynCommentViewDelegate <NSObject>
// close the comment view
- (void)deleteCommentView;
// tapping the avatar or nickname navigates to the user's profile page
- (void)toPersonHomeWithUserID:(NSString *)userID;
@end
@interface GHDynCommentView : UIView
@property (nonatomic, copy) void (^commentNumChanged)(BOOL isAdd, BXCommentModel *comment);
@property (nonatomic, weak) id<DynCommentViewDelegate> delegate;
- (instancetype)initWitBXHMovieModel:(BXHMovieModel *)model;
- (void)showInView:(UIView *)view;
- (void)showInView:(UIView *)view isWrite:(BOOL)isWrite topComment:(BXCommentModel *)topComment;
- (void)commentViewhidden;
@end
|
from reportlab.pdfgen import canvas
from reportlab.lib.units import mm
from reportlab.platypus import SimpleDocTemplate


class NumberedCanvas(canvas.Canvas):
    def __init__(self, *args, **kwargs):
        canvas.Canvas.__init__(self, *args, **kwargs)
        self._codes = []

    def showPage(self):
        # remember the finished page instead of emitting it right away
        self._codes.append({'code': self._code, 'stack': self._codeStack})
        self._startPage()

    def save(self):
        """add page info to each page (page x of y)"""
        # reset page counter
        self._pageNumber = 0
        for code in self._codes:
            # recall saved page
            self._code = code['code']
            self._codeStack = code['stack']
            self.setFont("Helvetica", 7)
            self.drawRightString(200*mm, 20*mm,
                "page %(this)i of %(total)i" % {
                    'this': self._pageNumber+1,
                    'total': len(self._codes),
                }
            )
            canvas.Canvas.showPage(self)
        canvas.Canvas.save(self)  # actually write the document out


# build doc
doc = SimpleDocTemplate("filename.pdf")
...  # add your report definition here (populate the elements list)
doc.build(elements, canvasmaker=NumberedCanvas)
|
<?php
namespace FilippoToso\Travelport\GDSQueue;
class Criteria
{
/**
* @var anonymous1188 $Order
*/
protected $Order = null;
/**
* @var anonymous1189 $Type
*/
protected $Type = null;
/**
* @param anonymous1188 $Order
* @param anonymous1189 $Type
*/
public function __construct($Order = null, $Type = null)
{
$this->Order = $Order;
$this->Type = $Type;
}
/**
* @return anonymous1188
*/
public function getOrder()
{
return $this->Order;
}
/**
* @param anonymous1188 $Order
* @return \FilippoToso\Travelport\GDSQueue\Criteria
*/
public function setOrder($Order)
{
$this->Order = $Order;
return $this;
}
/**
* @return anonymous1189
*/
public function getType()
{
return $this->Type;
}
/**
* @param anonymous1189 $Type
* @return \FilippoToso\Travelport\GDSQueue\Criteria
*/
public function setType($Type)
{
$this->Type = $Type;
return $this;
}
}
|
(ns p4f.routes.auth
(:require [p4f.resources.auth :refer [signup]]
[compojure.core :refer [ANY]]))
(def auth-routes
(ANY "/signup" [] (signup)))
|
#!/bin/bash
chmod +x *
source ./hash_id.conf
awk '{
print $0
}' | python ./hash_id.py ${TEST_HASH_NUM}
|
# assumed imports for this script (it loads a Brainstorm channel .mat file
# and a BrainVision .vhdr recording); paths below are the author's own
import numpy as np
from scipy.io import loadmat
from mne.channels import Montage  # legacy Montage class (pre-0.20 MNE API)
from mne.io import read_raw_brainvision as Raw  # .vhdr needs the BrainVision reader

fname = '/home/dmalt/Data/cognigraph/data/Koleno.vhdr'
# --------- prepare channels ------------ #
ch_path = '/home/dmalt/Data/cognigraph/channel_BrainProducts_ActiCap_128.mat'
ch_struct = loadmat(ch_path)
kind = ch_struct['Comment'][0]
chans = ch_struct['Channel'][0]
ch_locs = np.empty([len(chans), 3])
ch_types = [None] * len(chans)
ch_names = [None] * len(chans)
selection = np.arange(len(chans))
for i_ch, chan in enumerate(chans):
    ch_names[i_ch] = chan[0][0]
    ch_types[i_ch] = chan[2][0]
    ch_locs[i_ch] = chan[4][:, 0]
ch_locs[:,0:2] = ch_locs[:,-2:-4:-1]
ch_locs[:,0] = -ch_locs[:,0]
# ch_locs = ch_locs * 2
# ch_names[ch_names.index('OI1h')] = 'Ol1h'
# ch_names[ch_names.index('OI2h')] = 'Ol2h'
ch_names[ch_names.index('GND')] = 'AFz'
ch_names[ch_names.index('REF')] = 'FCz'
# ch_names[ch_names.index('TPP9h')] = 'TTP9h'
montage = Montage(ch_locs, ch_names, kind, selection)
# montage.plot()
raw = Raw(fname, preload=True)
raw.set_montage(montage)
raw.info['bads'] = ['F5', 'PPO10h', 'C5', 'FCC2h', 'F2', 'VEOG']
raw_c = raw.copy()
|
import { Matrix, MatrixArrayTypes } from '../core/matrix';
import { NeuralFunction } from './function';
import { NeuralSigmoidFunction } from './functions/sigmoid';
import { NeuralLayer } from './layer';
/**
* All neural input types.
*/
export type NeuralInputTypes = MatrixArrayTypes | number[];
/**
* Neural network class.
*/
export class NeuralNetwork {
/**
* All layers.
*/
#layers: NeuralLayer[] = [];
/**
* Default activation function.
*/
#activation: NeuralFunction;
/**
* Learning rate.
*/
#rate: number;
/**
* Initialize all the layers.
* @param layers Array containing the max number of neurons per layer.
*/
#initialize(layers: number[]): void {
let last;
for (const neurons of layers) {
if (last !== void 0) {
const layer = new NeuralLayer(neurons, last, this.#activation);
this.#layers.push(layer);
}
last = neurons;
}
}
/**
* Process all layers with the given input.
* @param input Input values.
* @returns Returns an array containing all the resulting matrices.
*/
#processAll(input: NeuralInputTypes): Matrix[] {
let result = Matrix.fromArray(input);
const output = [result];
for (const layer of this.#layers) {
result = layer.process(result);
output.push(result);
}
return output;
}
/**
* Multiply the first matrix by the second one using the hadamard multiplication.
* @param first First matrix.
* @param second Second matrix.
* @returns Returns a new matrix containing the results.
*/
#hadamardMultiply(first: Matrix, second: Matrix): Matrix {
return first.map((value, row, column) => value * second.at(row, column)!);
}
/**
* Multiply the given matrix using the scalar value.
* @param input Input matrix.
* @param value Scalar value.
* @returns Returns a new matrix containing the results.
*/
#scalarMultiply(input: Matrix, value: number): Matrix {
return input.map((current) => current * value);
}
/**
* Calculate the gradient descent for the given input and errors.
* @param input Input matrix.
* @param errors Errors matrix.
* @returns Returns a new matrix containing the results.
*/
#gradientDescent(input: Matrix, errors: Matrix): Matrix {
return this.#scalarMultiply(
this.#hadamardMultiply(
input.map((value) => this.#activation.derivative(value)),
errors
),
this.#rate
);
}
/**
* Default constructor.
* @param layers Array containing the max number of neurons per layer.
* @param rate Optional learning rate.
* @param activation Optional default activation function.
*/
constructor(layers: number[], rate?: number, activation?: NeuralFunction) {
this.#activation = activation ?? new NeuralSigmoidFunction();
this.#rate = rate ?? 0.1;
this.#initialize(layers);
}
/**
* Train the network to adjust its answer according to the given input and the expected values.
* @param input Input values.
* @param expect Expected output values.
*/
train(input: number[], expect: number[]): void {
const output = this.#processAll(input);
let errors = Matrix.fromArray(expect).subtract(output[output.length - 1]);
for (let index = output.length - 1; index > 0; --index) {
const layer = this.#layers[index - 1];
const bias = this.#gradientDescent(output[index], errors);
const weight = bias.multiply(output[index - 1].transpose());
NeuralLayer.adjust(layer, weight, bias);
if (index > 1) {
errors = layer.weight.transpose().multiply(errors);
}
}
}
/**
* Predict the best answer for the given input.
* @param input Input values.
* @returns Returns the output values for the best answer.
*/
predict(input: NeuralInputTypes): number[] {
const output = this.#processAll(input);
const offset = output.length - 1;
return output[offset].data;
}
/**
* Get the default activation function.
*/
get activation(): NeuralFunction {
return this.#activation;
}
/**
* Get the learning rate.
*/
get rate(): number {
return this.#rate;
}
/**
* Create a new neural network based on the crossover of the given networks.
* @param network1 First network.
* @param network2 Second network.
* @returns Returns the generated neural network.
*/
static fromCrossover(network1: NeuralNetwork, network2: NeuralNetwork): NeuralNetwork {
const result = new NeuralNetwork([], (network1.rate + network2.rate) / 2, network1.#activation);
for (let index = 0; index < network1.#layers.length; ++index) {
const layer = NeuralLayer.fromCrossover(network1.#layers[index], network2.#layers[index]);
result.#layers.push(layer);
}
return result;
}
/**
* Create a new neural network filled with random values.
* @param layers Array containing the max number of neurons per layer.
* @param rate Optional learning rate.
* @param activation Optional activation function.
* @returns Returns the generated neural network.
*/
static fromRandom(layers: number[], rate?: number, activation?: NeuralFunction): NeuralNetwork {
const result = new NeuralNetwork(layers, rate, activation);
for (const layer of result.#layers) {
NeuralLayer.randomize(layer, -1, 1);
}
return result;
}
/**
* Mutate all layers for the given neural network.
* @param network Input network.
* @param min Min mutation value.
* @param max Max mutation value.
* @param rate Mutation rate.
*/
static mutate(network: NeuralNetwork, min: number, max: number, rate: number): void {
for (const layer of network.#layers) {
NeuralLayer.mutate(layer, min, max, rate);
}
}
}
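// --- Hypothetical usage sketch (illustration only, not part of the original module). ---
// It exercises the public API defined above: fromRandom() builds a randomized
// 2-4-1 network, train() runs on the XOR truth table, then predict() is checked.
// The layer sizes, learning rate and epoch count are arbitrary choices for the example.
const xorSamples: Array<[number[], number[]]> = [
  [[0, 0], [0]],
  [[0, 1], [1]],
  [[1, 0], [1]],
  [[1, 1], [0]],
];
const xorNetwork = NeuralNetwork.fromRandom([2, 4, 1], 0.1);
for (let epoch = 0; epoch < 10000; ++epoch) {
  for (const [input, expected] of xorSamples) {
    xorNetwork.train(input, expected);
  }
}
// Each prediction should approach [0], [1], [1], [0] respectively.
console.log(xorSamples.map(([input]) => xorNetwork.predict(input)));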
|
/*
gg_shape.c -- Gaia shapefile handling
version 4.3, 2015 June 29
Author: Sandro Furieri a.furieri@lqt.it
------------------------------------------------------------------------------
Version: MPL 1.1/GPL 2.0/LGPL 2.1
The contents of this file are subject to the Mozilla Public License Version
1.1 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.mozilla.org/MPL/
Software distributed under the License is distributed on an "AS IS" basis,
WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
for the specific language governing rights and limitations under the
License.
The Original Code is the SpatiaLite library
The Initial Developer of the Original Code is Alessandro Furieri
Portions created by the Initial Developer are Copyright (C) 2008-2015
the Initial Developer. All Rights Reserved.
Contributor(s):
Alternatively, the contents of this file may be used under the terms of
either the GNU General Public License Version 2 or later (the "GPL"), or
the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
in which case the provisions of the GPL or the LGPL are applicable instead
of those above. If you wish to allow use of your version of this file only
under the terms of either the GPL or the LGPL, and not to allow others to
use your version of this file under the terms of the MPL, indicate your
decision by deleting the provisions above and replace them with the notice
and other provisions required by the GPL or the LGPL. If you do not delete
the provisions above, a recipient may use your version of this file under
the terms of any one of the MPL, the GPL or the LGPL.
*/
#include <sys/types.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <float.h>
#include <errno.h>
#if defined(_WIN32) && !defined(__MINGW32__)
#include "config-msvc.h"
#else
#include "config.h"
#endif
#if OMIT_ICONV == 0 /* if ICONV is disabled no SHP support is available */
#if defined(__MINGW32__) || defined(_WIN32)
#define LIBICONV_STATIC
#include <iconv.h>
#define LIBCHARSET_STATIC
#ifdef _MSC_VER
/* <localcharset.h> isn't supported on OSGeo4W */
/* applying a tricky workaround to fix this issue */
extern const char *locale_charset (void);
#else /* sane Windows - not OSGeo4W */
#include <localcharset.h>
#endif /* end localcharset */
#else /* not MINGW32 */
#if defined(__APPLE__) || defined(__ANDROID__)
#include <iconv.h>
#include <localcharset.h>
#else /* neither Mac OsX nor Android */
#include <iconv.h>
#include <langinfo.h>
#endif
#endif
#include <spatialite/sqlite.h>
#include <spatialite/gaiageo.h>
#include <spatialite/debug.h>
#ifdef _WIN32
#define atoll _atoi64
#endif /* not WIN32 */
/* 64 bit integer: portable format for printf() */
#if defined(_WIN32) && !defined(__MINGW32__)
#define FRMT64 "%I64d"
#else
#define FRMT64 "%lld"
#endif
#define SHAPEFILE_NO_DATA 1e-38
#ifdef _WIN32
#define strcasecmp _stricmp
#endif /* not WIN32 */
struct auxdbf_fld
{
/* auxiliary DBF field struct */
char *name;
struct auxdbf_fld *next;
};
struct auxdbf_list
{
/* auxiliary DBF struct */
struct auxdbf_fld *first;
struct auxdbf_fld *last;
};
GAIAGEO_DECLARE void
gaiaFreeValue (gaiaValuePtr p)
{
/* frees all memory allocations for this DBF Field value */
if (!p)
return;
if (p->TxtValue)
free (p->TxtValue);
free (p);
}
GAIAGEO_DECLARE void
gaiaSetNullValue (gaiaDbfFieldPtr field)
{
/* assigns a NULL value to some DBF field */
if (field->Value)
gaiaFreeValue (field->Value);
field->Value = malloc (sizeof (gaiaValue));
field->Value->Type = GAIA_NULL_VALUE;
field->Value->TxtValue = NULL;
}
GAIAGEO_DECLARE void
gaiaSetIntValue (gaiaDbfFieldPtr field, sqlite3_int64 value)
{
/* assigns an INTEGER value to some DBF field */
if (field->Value)
gaiaFreeValue (field->Value);
field->Value = malloc (sizeof (gaiaValue));
field->Value->Type = GAIA_INT_VALUE;
field->Value->TxtValue = NULL;
field->Value->IntValue = value;
}
GAIAGEO_DECLARE void
gaiaSetDoubleValue (gaiaDbfFieldPtr field, double value)
{
/* assigns a DOUBLE value to some DBF field */
if (field->Value)
gaiaFreeValue (field->Value);
field->Value = malloc (sizeof (gaiaValue));
field->Value->Type = GAIA_DOUBLE_VALUE;
field->Value->TxtValue = NULL;
field->Value->DblValue = value;
}
GAIAGEO_DECLARE void
gaiaSetStrValue (gaiaDbfFieldPtr field, char *str)
{
/* assigns a STRING value to some DBF field */
int len = strlen (str);
if (field->Value)
gaiaFreeValue (field->Value);
field->Value = malloc (sizeof (gaiaValue));
field->Value->Type = GAIA_TEXT_VALUE;
field->Value->TxtValue = malloc (len + 1);
strcpy (field->Value->TxtValue, str);
}
GAIAGEO_DECLARE gaiaDbfFieldPtr
gaiaAllocDbfField (char *name, unsigned char type,
int offset, unsigned char length, unsigned char decimals)
{
/* allocates and initializes a DBF Field definition */
gaiaDbfFieldPtr p = malloc (sizeof (gaiaDbfField));
int len = strlen (name);
p->Name = malloc (len + 1);
strcpy (p->Name, name);
p->Type = type;
p->Offset = offset;
p->Length = length;
p->Decimals = decimals;
p->Value = NULL;
p->Next = NULL;
return p;
}
GAIAGEO_DECLARE void
gaiaFreeDbfField (gaiaDbfFieldPtr p)
{
/* frees all memory allocations for this DBF Field definition */
if (!p)
return;
if (p->Name)
free (p->Name);
if (p->Value)
gaiaFreeValue (p->Value);
free (p);
}
GAIAGEO_DECLARE gaiaDbfFieldPtr
gaiaCloneDbfField (gaiaDbfFieldPtr org)
{
/* creating a new DBF Field copied from the original one */
gaiaDbfFieldPtr p = malloc (sizeof (gaiaDbfField));
int len = strlen (org->Name);
p->Name = malloc (len + 1);
strcpy (p->Name, org->Name);
p->Type = org->Type;
p->Offset = org->Offset;
p->Length = org->Length;
p->Decimals = org->Decimals;
p->Value = gaiaCloneValue (org->Value);
p->Next = NULL;
return p;
}
GAIAGEO_DECLARE gaiaDbfListPtr
gaiaAllocDbfList ()
{
/* allocates and initializes the DBF Fields list */
gaiaDbfListPtr list = malloc (sizeof (gaiaDbfList));
list->RowId = 0;
list->Geometry = NULL;
list->First = NULL;
list->Last = NULL;
return list;
}
GAIAGEO_DECLARE void
gaiaFreeDbfList (gaiaDbfListPtr list)
{
/* frees all memory allocations related to DBF Fields list */
gaiaDbfFieldPtr p;
gaiaDbfFieldPtr pn;
if (!list)
return;
p = list->First;
while (p)
{
pn = p->Next;
gaiaFreeDbfField (p);
p = pn;
}
if (list->Geometry)
gaiaFreeGeomColl (list->Geometry);
free (list);
}
GAIAGEO_DECLARE int
gaiaIsValidDbfList (gaiaDbfListPtr list)
{
/* checks if the DBF fields list contains any invalid data type */
gaiaDbfFieldPtr p;
if (!list)
return 0;
p = list->First;
while (p)
{
if (p->Type == 'N' || p->Type == 'C' || p->Type == 'L'
|| p->Type == 'D' || p->Type == 'F')
;
else
return 0;
p = p->Next;
}
return 1;
}
GAIAGEO_DECLARE gaiaDbfFieldPtr
gaiaAddDbfField (gaiaDbfListPtr list, char *name, unsigned char type,
int offset, unsigned char length, unsigned char decimals)
{
/* inserts a Field in the DBF Fields list */
gaiaDbfFieldPtr p;
if (!list)
return NULL;
p = gaiaAllocDbfField (name, type, offset, length, decimals);
if (!(list->First))
list->First = p;
if (list->Last)
list->Last->Next = p;
list->Last = p;
return p;
}
GAIAGEO_DECLARE void
gaiaResetDbfEntity (gaiaDbfListPtr list)
{
/* resets data values */
gaiaDbfFieldPtr p;
if (!list)
return;
p = list->First;
while (p)
{
if (p->Value)
gaiaFreeValue (p->Value);
p->Value = NULL;
p = p->Next;
}
if (list->Geometry)
gaiaFreeGeomColl (list->Geometry);
list->Geometry = NULL;
}
GAIAGEO_DECLARE gaiaValuePtr
gaiaCloneValue (gaiaValuePtr org)
{
/* creating a new VARIANT value copied from the original one */
gaiaValuePtr value;
int len;
value = malloc (sizeof (gaiaValue));
value->Type = GAIA_NULL_VALUE;
value->TxtValue = NULL;
switch (org->Type)
{
case GAIA_INT_VALUE:
value->Type = GAIA_INT_VALUE;
value->IntValue = org->IntValue;
break;
case GAIA_DOUBLE_VALUE:
value->Type = GAIA_DOUBLE_VALUE;
value->DblValue = org->DblValue;
break;
case GAIA_TEXT_VALUE:
value->Type = GAIA_TEXT_VALUE;
len = strlen (org->TxtValue);
value->TxtValue = malloc (len + 1);
strcpy (value->TxtValue, org->TxtValue);
};
return value;
}
GAIAGEO_DECLARE gaiaDbfListPtr
gaiaCloneDbfEntity (gaiaDbfListPtr org)
{
/* creating a new DBF LIST copied from the original one */
gaiaDbfFieldPtr p;
gaiaDbfFieldPtr newFld;
gaiaDbfListPtr entity = gaiaAllocDbfList ();
entity->RowId = org->RowId;
if (org->Geometry)
entity->Geometry = gaiaCloneGeomColl (org->Geometry);
p = org->First;
while (p)
{
newFld =
gaiaAddDbfField (entity, p->Name, p->Type, p->Offset, p->Length,
p->Decimals);
if (p->Value)
newFld->Value = gaiaCloneValue (p->Value);
p = p->Next;
}
return entity;
}
GAIAGEO_DECLARE gaiaShapefilePtr
gaiaAllocShapefile ()
{
/* allocates and initializes the Shapefile object */
gaiaShapefilePtr shp = malloc (sizeof (gaiaShapefile));
shp->endian_arch = 1;
shp->Path = NULL;
shp->Shape = -1;
shp->EffectiveType = GAIA_UNKNOWN;
shp->EffectiveDims = GAIA_XY;
shp->flShp = NULL;
shp->flShx = NULL;
shp->flDbf = NULL;
shp->Dbf = NULL;
shp->BufShp = NULL;
shp->ShpBfsz = 0;
shp->BufDbf = NULL;
shp->DbfHdsz = 0;
shp->DbfReclen = 0;
shp->DbfSize = 0;
shp->DbfRecno = 0;
shp->ShpSize = 0;
shp->ShxSize = 0;
shp->MinX = DBL_MAX;
shp->MinY = DBL_MAX;
shp->MaxX = -DBL_MAX;
shp->MaxY = -DBL_MAX;
shp->Valid = 0;
shp->IconvObj = NULL;
shp->LastError = NULL;
return shp;
}
GAIAGEO_DECLARE void
gaiaFreeShapefile (gaiaShapefilePtr shp)
{
/* frees all memory allocations related to the Shapefile object */
if (shp->Path)
free (shp->Path);
if (shp->flShp)
fclose (shp->flShp);
if (shp->flShx)
fclose (shp->flShx);
if (shp->flDbf)
fclose (shp->flDbf);
if (shp->Dbf)
gaiaFreeDbfList (shp->Dbf);
if (shp->BufShp)
free (shp->BufShp);
if (shp->BufDbf)
free (shp->BufDbf);
if (shp->IconvObj)
iconv_close ((iconv_t) shp->IconvObj);
if (shp->LastError)
free (shp->LastError);
free (shp);
}
GAIAGEO_DECLARE void
gaiaOpenShpRead (gaiaShapefilePtr shp, const char *path, const char *charFrom,
const char *charTo)
{
/* trying to open the shapefile and run initial checks */
FILE *fl_shx = NULL;
FILE *fl_shp = NULL;
FILE *fl_dbf = NULL;
char xpath[1024];
int rd;
unsigned char buf_shx[256];
unsigned char *buf_shp = NULL;
int buf_size = 1024;
int shape;
unsigned char bf[1024];
int dbf_size;
int dbf_reclen = 0;
int off_dbf;
int ind;
char field_name[2048];
char *sys_err;
char errMsg[1024];
iconv_t iconv_ret;
char utf8buf[2048];
#if !defined(__MINGW32__) && defined(_WIN32)
const char *pBuf;
#else /* not WIN32 */
char *pBuf;
#endif
size_t len;
size_t utf8len;
char *pUtf8buf;
int endian_arch = gaiaEndianArch ();
gaiaDbfListPtr dbf_list = NULL;
if (charFrom && charTo)
{
iconv_ret = iconv_open (charTo, charFrom);
if (iconv_ret == (iconv_t) (-1))
{
sprintf (errMsg, "conversion from '%s' to '%s' not available\n",
charFrom, charTo);
goto unsupported_conversion;
}
shp->IconvObj = iconv_ret;
}
else
{
sprintf (errMsg, "a NULL charset-name was passed\n");
goto unsupported_conversion;
}
if (shp->flShp != NULL || shp->flShx != NULL || shp->flDbf != NULL)
{
sprintf (errMsg,
"attempting to reopen an already opened Shapefile\n");
goto unsupported_conversion;
}
sprintf (xpath, "%s.shx", path);
fl_shx = fopen (xpath, "rb");
if (!fl_shx)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for reading: %s", xpath,
sys_err);
goto no_file;
}
sprintf (xpath, "%s.shp", path);
fl_shp = fopen (xpath, "rb");
if (!fl_shp)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for reading: %s", xpath,
sys_err);
goto no_file;
}
sprintf (xpath, "%s.dbf", path);
fl_dbf = fopen (xpath, "rb");
if (!fl_dbf)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for reading: %s", xpath,
sys_err);
goto no_file;
}
/* reading SHX file header */
rd = fread (buf_shx, sizeof (unsigned char), 100, fl_shx);
if (rd != 100)
goto error;
if (gaiaImport32 (buf_shx + 0, GAIA_BIG_ENDIAN, endian_arch) != 9994) /* checks the SHX magic number */
goto error;
/* reading SHP file header */
buf_shp = malloc (sizeof (unsigned char) * buf_size);
rd = fread (buf_shp, sizeof (unsigned char), 100, fl_shp);
if (rd != 100)
goto error;
if (gaiaImport32 (buf_shp + 0, GAIA_BIG_ENDIAN, endian_arch) != 9994) /* checks the SHP magic number */
goto error;
shape = gaiaImport32 (buf_shp + 32, GAIA_LITTLE_ENDIAN, endian_arch);
if (shape == GAIA_SHP_POINT || shape == GAIA_SHP_POINTZ
|| shape == GAIA_SHP_POINTM || shape == GAIA_SHP_POLYLINE
|| shape == GAIA_SHP_POLYLINEZ || shape == GAIA_SHP_POLYLINEM
|| shape == GAIA_SHP_POLYGON || shape == GAIA_SHP_POLYGONZ
|| shape == GAIA_SHP_POLYGONM || shape == GAIA_SHP_MULTIPOINT
|| shape == GAIA_SHP_MULTIPOINTZ || shape == GAIA_SHP_MULTIPOINTM)
;
else
goto unsupported;
/* reading DBF file header */
rd = fread (bf, sizeof (unsigned char), 32, fl_dbf);
if (rd != 32)
goto error;
switch (*bf)
{
/* checks the DBF magic number */
case 0x03:
case 0x83:
break;
case 0x02:
case 0xF8:
sprintf (errMsg, "'%s'\ninvalid magic number %02x [FoxBASE format]",
path, *bf);
goto dbf_bad_magic;
case 0xF5:
sprintf (errMsg,
"'%s'\ninvalid magic number %02x [FoxPro 2.x (or earlier) format]",
path, *bf);
goto dbf_bad_magic;
case 0x30:
case 0x31:
case 0x32:
sprintf (errMsg,
"'%s'\ninvalid magic number %02x [Visual FoxPro format]",
path, *bf);
goto dbf_bad_magic;
case 0x43:
case 0x63:
case 0xBB:
case 0xCB:
sprintf (errMsg, "'%s'\ninvalid magic number %02x [dBASE IV format]",
path, *bf);
goto dbf_bad_magic;
default:
sprintf (errMsg, "'%s'\ninvalid magic number %02x [unknown format]",
path, *bf);
goto dbf_bad_magic;
};
dbf_size = gaiaImport16 (bf + 8, GAIA_LITTLE_ENDIAN, endian_arch);
dbf_reclen = gaiaImport16 (bf + 10, GAIA_LITTLE_ENDIAN, endian_arch);
dbf_size--;
off_dbf = 0;
dbf_list = gaiaAllocDbfList ();
for (ind = 32; ind < dbf_size; ind += 32)
{
/* fetches DBF fields definitions */
rd = fread (bf, sizeof (unsigned char), 32, fl_dbf);
if (rd != 32)
goto error;
if (*(bf + 11) == 'M')
{
/* skipping any MEMO field */
memcpy (field_name, bf, 11);
field_name[11] = '\0';
off_dbf += *(bf + 16);
spatialite_e
("WARNING: column \"%s\" is of the MEMO type and will be ignored\n",
field_name);
continue;
}
memcpy (field_name, bf, 11);
field_name[11] = '\0';
len = strlen ((char *) field_name);
utf8len = 2048;
pBuf = (char *) field_name;
pUtf8buf = utf8buf;
if (iconv
((iconv_t) (shp->IconvObj), &pBuf, &len, &pUtf8buf,
&utf8len) == (size_t) (-1))
goto conversion_error;
memcpy (field_name, utf8buf, 2048 - utf8len);
field_name[2048 - utf8len] = '\0';
gaiaAddDbfField (dbf_list, field_name, *(bf + 11), off_dbf,
*(bf + 16), *(bf + 17));
off_dbf += *(bf + 16);
}
if (!gaiaIsValidDbfList (dbf_list))
{
/* invalid DBF */
goto illegal_dbf;
}
len = strlen (path);
shp->Path = malloc (len + 1);
strcpy (shp->Path, path);
shp->ReadOnly = 1;
shp->Shape = shape;
switch (shape)
{
/* setting up a prudential geometry type */
case GAIA_SHP_POINT:
case GAIA_SHP_POINTZ:
case GAIA_SHP_POINTM:
shp->EffectiveType = GAIA_POINT;
break;
case GAIA_SHP_POLYLINE:
case GAIA_SHP_POLYLINEZ:
case GAIA_SHP_POLYLINEM:
shp->EffectiveType = GAIA_MULTILINESTRING;
break;
case GAIA_SHP_POLYGON:
case GAIA_SHP_POLYGONZ:
case GAIA_SHP_POLYGONM:
shp->EffectiveType = GAIA_MULTIPOLYGON;
break;
case GAIA_SHP_MULTIPOINT:
case GAIA_SHP_MULTIPOINTZ:
case GAIA_SHP_MULTIPOINTM:
shp->EffectiveType = GAIA_MULTIPOINT;
break;
}
switch (shape)
{
/* setting up a prudential dimension model */
case GAIA_SHP_POINTZ:
case GAIA_SHP_POLYLINEZ:
case GAIA_SHP_POLYGONZ:
case GAIA_SHP_MULTIPOINTZ:
shp->EffectiveDims = GAIA_XY_Z_M;
break;
case GAIA_SHP_POINTM:
case GAIA_SHP_POLYLINEM:
case GAIA_SHP_POLYGONM:
case GAIA_SHP_MULTIPOINTM:
shp->EffectiveDims = GAIA_XY_M;
break;
default:
shp->EffectiveDims = GAIA_XY;
break;
}
shp->flShp = fl_shp;
shp->flShx = fl_shx;
shp->flDbf = fl_dbf;
shp->Dbf = dbf_list;
/* saving the SHP buffer */
shp->BufShp = buf_shp;
shp->ShpBfsz = buf_size;
/* allocating DBF buffer */
shp->BufDbf = malloc (sizeof (unsigned char) * dbf_reclen);
shp->DbfHdsz = dbf_size + 1;
shp->DbfReclen = dbf_reclen;
shp->Valid = 1;
shp->endian_arch = endian_arch;
return;
unsupported_conversion:
/* illegal charset */
if (shp->LastError)
free (shp->LastError);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
return;
no_file:
/* one of the shapefile's files can't be accessed */
if (shp->LastError)
free (shp->LastError);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
if (fl_shx)
fclose (fl_shx);
if (fl_shp)
fclose (fl_shp);
if (fl_dbf)
fclose (fl_dbf);
return;
dbf_bad_magic:
/* the DBF has an invalid magic number */
if (shp->LastError)
free (shp->LastError);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
if (buf_shp)
free (buf_shp);
fclose (fl_shx);
fclose (fl_shp);
fclose (fl_dbf);
return;
error:
/* the shapefile is invalid or corrupted */
if (shp->LastError)
free (shp->LastError);
sprintf (errMsg, "'%s' is corrupted / has invalid format", path);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
if (buf_shp)
free (buf_shp);
fclose (fl_shx);
fclose (fl_shp);
fclose (fl_dbf);
return;
unsupported:
/* the shapefile has an unrecognized shape type */
if (shp->LastError)
free (shp->LastError);
sprintf (errMsg, "'%s' shape=%d is not supported", path, shape);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
if (buf_shp)
free (buf_shp);
fclose (fl_shx);
fclose (fl_shp);
if (fl_dbf)
fclose (fl_dbf);
return;
illegal_dbf:
/* the DBF-file contains unsupported data types */
if (shp->LastError)
free (shp->LastError);
sprintf (errMsg, "'%s.dbf' contains unsupported data types", path);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
if (buf_shp)
free (buf_shp);
fclose (fl_shx);
fclose (fl_shp);
if (fl_dbf)
fclose (fl_dbf);
return;
conversion_error:
/* libiconv error */
if (shp->LastError)
free (shp->LastError);
sprintf (errMsg, "'%s.dbf' field name: invalid character sequence", path);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
if (buf_shp)
free (buf_shp);
fclose (fl_shx);
fclose (fl_shp);
if (fl_dbf)
fclose (fl_dbf);
return;
}
static struct auxdbf_list *
alloc_auxdbf (gaiaDbfListPtr dbf_list)
{
/* allocating the auxiliary DBF struct */
int len;
gaiaDbfFieldPtr fld;
struct auxdbf_fld *fld_ex;
struct auxdbf_list *auxdbf = malloc (sizeof (struct auxdbf_list));
auxdbf->first = NULL;
auxdbf->last = NULL;
fld = dbf_list->First;
while (fld)
{
fld_ex = malloc (sizeof (struct auxdbf_fld));
len = strlen (fld->Name);
fld_ex->name = malloc (len + 1);
strcpy (fld_ex->name, fld->Name);
fld_ex->next = NULL;
if (auxdbf->first == NULL)
auxdbf->first = fld_ex;
if (auxdbf->last != NULL)
auxdbf->last->next = fld_ex;
auxdbf->last = fld_ex;
fld = fld->Next;
}
return auxdbf;
}
static void
free_auxdbf (struct auxdbf_list *auxdbf)
{
/* freeing an auxiliary DBF struct */
struct auxdbf_fld *n_fld;
struct auxdbf_fld *fld = auxdbf->first;
while (fld != NULL)
{
n_fld = fld->next;
if (fld->name != NULL)
free (fld->name);
free (fld);
fld = n_fld;
}
free (auxdbf);
}
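/*
/ DBF field descriptors reserve just 10 characters for the field name;
/ longer names must be truncated, and the auxiliary list above is used
/ to keep the truncated names unique by appending a numeric or
/ alphabetic suffix (see truncate_long_name below)
*/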
static void
truncate_long_name (struct auxdbf_list *list, gaiaDbfFieldPtr xfld)
{
/* attempting to create a unique short name <= 10 bytes */
char suffix;
char buf[16];
struct auxdbf_fld *fld;
struct auxdbf_fld *base = NULL;
memcpy (buf, xfld->Name, 9);
buf[10] = '\0';
fld = list->first;
while (fld)
{
/* identifying the base aux Field */
if (strcmp (xfld->Name, fld->name) == 0)
{
base = fld;
break;
}
fld = fld->next;
}
suffix = '0';
while (1)
{
/* attempting to find a numeric suffix ensuring uniqueness */
int ok = 1;
buf[9] = suffix;
fld = list->first;
while (fld)
{
if (base != fld)
{
if (strcasecmp (buf, fld->name) == 0)
{
/* invalid: already defined */
ok = 0;
break;
}
}
fld = fld->next;
}
if (ok)
{
strcpy (xfld->Name, buf);
if (base != NULL)
strcpy (base->name, buf);
return;
}
if (suffix == '9')
break;
else
suffix++;
}
suffix = 'A';
while (1)
{
/* attempting to find a letter suffix ensuring uniqueness */
int ok = 1;
buf[9] = suffix;
fld = list->first;
while (fld)
{
if (base != fld)
{
if (strcasecmp (buf, fld->name) == 0)
{
/* invalid: already defined */
ok = 0;
break;
}
}
fld = fld->next;
}
if (ok)
{
strcpy (xfld->Name, buf);
if (base != NULL)
strcpy (base->name, buf);
return;
}
if (suffix == 'Z')
break;
else
suffix++;
}
}
GAIAGEO_DECLARE void
gaiaOpenShpWrite (gaiaShapefilePtr shp, const char *path, int shape,
gaiaDbfListPtr dbf_list, const char *charFrom,
const char *charTo)
{
/* trying to create the shapefile */
FILE *fl_shx = NULL;
FILE *fl_shp = NULL;
FILE *fl_dbf = NULL;
char xpath[1024];
unsigned char *buf_shp = NULL;
int buf_size = 1024;
unsigned char *dbf_buf = NULL;
gaiaDbfFieldPtr fld;
char *sys_err;
char errMsg[1024];
short dbf_reclen = 0;
int shp_size = 0;
int shx_size = 0;
unsigned short dbf_size = 0;
iconv_t iconv_ret;
int endian_arch = gaiaEndianArch ();
char buf[2048];
char utf8buf[2048];
#if !defined(__MINGW32__) && defined(_WIN32)
const char *pBuf;
#else /* not WIN32 */
char *pBuf;
#endif
size_t len;
size_t utf8len;
char *pUtf8buf;
int defaultId = 1;
struct auxdbf_list *auxdbf = NULL;
if (charFrom && charTo)
{
iconv_ret = iconv_open (charTo, charFrom);
if (iconv_ret == (iconv_t) (-1))
{
sprintf (errMsg, "conversion from '%s' to '%s' not available\n",
charFrom, charTo);
goto unsupported_conversion;
}
shp->IconvObj = iconv_ret;
}
else
{
sprintf (errMsg, "a NULL charset-name was passed\n");
goto unsupported_conversion;
}
if (shp->flShp != NULL || shp->flShx != NULL || shp->flDbf != NULL)
{
sprintf (errMsg,
"attempting to reopen an already opened Shapefile\n");
goto unsupported_conversion;
}
buf_shp = malloc (buf_size);
/* trying to open shapefile files */
sprintf (xpath, "%s.shx", path);
fl_shx = fopen (xpath, "wb");
if (!fl_shx)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for writing: %s", xpath,
sys_err);
goto no_file;
}
sprintf (xpath, "%s.shp", path);
fl_shp = fopen (xpath, "wb");
if (!fl_shp)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for writing: %s", xpath,
sys_err);
goto no_file;
}
sprintf (xpath, "%s.dbf", path);
fl_dbf = fopen (xpath, "wb");
if (!fl_dbf)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for writing: %s", xpath,
sys_err);
goto no_file;
}
/* allocating DBF buffer */
dbf_reclen = 1;	/* an extra byte is needed because in DBF rows the first byte is a deletion marker */
fld = dbf_list->First;
while (fld)
{
/* computing the DBF record length */
dbf_reclen += fld->Length;
fld = fld->Next;
}
dbf_buf = malloc (dbf_reclen);
/* writing an empty SHP file header */
memset (buf_shp, 0, 100);
fwrite (buf_shp, 1, 100, fl_shp);
shp_size = 50; /* note: shapefile [SHP and SHX] counts sizes in WORDS of 16 bits, not in bytes of 8 bits !!!! */
/* writing an empty SHX file header */
memset (buf_shp, 0, 100);
fwrite (buf_shp, 1, 100, fl_shx);
shx_size = 50;
/* writing the DBF file header */
memset (buf_shp, '\0', 32);
fwrite (buf_shp, 1, 32, fl_dbf);
dbf_size = 32; /* note: DBF counts sizes in bytes */
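/*
/ DBF layout reminder: a 32 bytes file header, then one 32 bytes
/ descriptor per field, then a single 0x0D byte closing the header;
/ the leading 32 bytes written above are just a placeholder, since the
/ final record count is known only after all entities have been written
*/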
auxdbf = alloc_auxdbf (dbf_list);
fld = dbf_list->First;
while (fld)
{
/* exporting DBF Fields specifications */
memset (buf_shp, 0, 32);
if (strlen (fld->Name) > 10)
{
/* long name: attempting to safely truncate */
truncate_long_name (auxdbf, fld);
}
strcpy (buf, fld->Name);
len = strlen (buf);
utf8len = 2048;
pBuf = buf;
pUtf8buf = utf8buf;
if (iconv
((iconv_t) (shp->IconvObj), &pBuf, &len, &pUtf8buf,
&utf8len) == (size_t) (-1))
sprintf (buf, "FLD#%d", defaultId++);
else
{
memcpy (buf, utf8buf, 2048 - utf8len);
buf[2048 - utf8len] = '\0';
if (strlen (buf) > 10)
sprintf (buf, "FLD#%d", defaultId++);
}
memcpy (buf_shp, buf, strlen (buf));
*(buf_shp + 11) = fld->Type;
*(buf_shp + 16) = fld->Length;
*(buf_shp + 17) = fld->Decimals;
fwrite (buf_shp, 1, 32, fl_dbf);
dbf_size += 32;
fld = fld->Next;
}
free_auxdbf (auxdbf);
fwrite ("\r", 1, 1, fl_dbf); /* this one is a special DBF delimiter that closes file header */
dbf_size++;
/* setting up the SHP struct */
len = strlen (path);
shp->Path = malloc (len + 1);
strcpy (shp->Path, path);
shp->ReadOnly = 0;
switch (shape)
{
/* setting up SHAPE and dimensions */
case GAIA_POINT:
shp->Shape = GAIA_SHP_POINT;
shp->EffectiveType = GAIA_POINT;
shp->EffectiveDims = GAIA_XY;
break;
case GAIA_POINTZ:
shp->Shape = GAIA_SHP_POINTZ;
shp->EffectiveType = GAIA_POINT;
shp->EffectiveDims = GAIA_XY_Z;
break;
case GAIA_POINTM:
shp->Shape = GAIA_SHP_POINTM;
shp->EffectiveType = GAIA_POINT;
shp->EffectiveDims = GAIA_XY_M;
break;
case GAIA_POINTZM:
shp->Shape = GAIA_SHP_POINTZ;
shp->EffectiveType = GAIA_POINT;
shp->EffectiveDims = GAIA_XY_Z_M;
break;
case GAIA_MULTIPOINT:
shp->Shape = GAIA_SHP_MULTIPOINT;
shp->EffectiveType = GAIA_MULTIPOINT;
shp->EffectiveDims = GAIA_XY;
break;
case GAIA_MULTIPOINTZ:
shp->Shape = GAIA_SHP_MULTIPOINTZ;
shp->EffectiveType = GAIA_MULTIPOINT;
shp->EffectiveDims = GAIA_XY_Z;
break;
case GAIA_MULTIPOINTM:
shp->Shape = GAIA_SHP_MULTIPOINTM;
shp->EffectiveType = GAIA_MULTIPOINT;
shp->EffectiveDims = GAIA_XY_M;
break;
case GAIA_MULTIPOINTZM:
shp->Shape = GAIA_SHP_MULTIPOINTZ;
shp->EffectiveType = GAIA_MULTIPOINT;
shp->EffectiveDims = GAIA_XY_Z_M;
break;
case GAIA_LINESTRING:
shp->Shape = GAIA_SHP_POLYLINE;
shp->EffectiveType = GAIA_LINESTRING;
shp->EffectiveDims = GAIA_XY;
break;
case GAIA_LINESTRINGZ:
shp->Shape = GAIA_SHP_POLYLINEZ;
shp->EffectiveType = GAIA_LINESTRING;
shp->EffectiveDims = GAIA_XY_Z;
break;
case GAIA_LINESTRINGM:
shp->Shape = GAIA_SHP_POLYLINEM;
shp->EffectiveType = GAIA_LINESTRING;
shp->EffectiveDims = GAIA_XY_M;
break;
case GAIA_LINESTRINGZM:
shp->Shape = GAIA_SHP_POLYLINEZ;
shp->EffectiveType = GAIA_LINESTRING;
shp->EffectiveDims = GAIA_XY_Z_M;
break;
case GAIA_MULTILINESTRING:
shp->Shape = GAIA_SHP_POLYLINE;
shp->EffectiveType = GAIA_MULTILINESTRING;
shp->EffectiveDims = GAIA_XY;
break;
case GAIA_MULTILINESTRINGZ:
shp->Shape = GAIA_SHP_POLYLINEZ;
shp->EffectiveType = GAIA_MULTILINESTRING;
shp->EffectiveDims = GAIA_XY_Z;
break;
case GAIA_MULTILINESTRINGM:
shp->Shape = GAIA_SHP_POLYLINEM;
shp->EffectiveType = GAIA_MULTILINESTRING;
shp->EffectiveDims = GAIA_XY_M;
break;
case GAIA_MULTILINESTRINGZM:
shp->Shape = GAIA_SHP_POLYLINEZ;
shp->EffectiveType = GAIA_MULTILINESTRING;
shp->EffectiveDims = GAIA_XY_Z_M;
break;
case GAIA_POLYGON:
shp->Shape = GAIA_SHP_POLYGON;
shp->EffectiveType = GAIA_POLYGON;
shp->EffectiveDims = GAIA_XY;
break;
case GAIA_POLYGONZ:
shp->Shape = GAIA_SHP_POLYGONZ;
shp->EffectiveType = GAIA_POLYGON;
shp->EffectiveDims = GAIA_XY_Z;
break;
case GAIA_POLYGONM:
shp->Shape = GAIA_SHP_POLYGONM;
shp->EffectiveType = GAIA_POLYGON;
shp->EffectiveDims = GAIA_XY_M;
break;
case GAIA_POLYGONZM:
shp->Shape = GAIA_SHP_POLYGONZ;
shp->EffectiveType = GAIA_POLYGON;
shp->EffectiveDims = GAIA_XY_Z_M;
break;
case GAIA_MULTIPOLYGON:
shp->Shape = GAIA_SHP_POLYGON;
shp->EffectiveType = GAIA_MULTIPOLYGON;
shp->EffectiveDims = GAIA_XY;
break;
case GAIA_MULTIPOLYGONZ:
shp->Shape = GAIA_SHP_POLYGONZ;
shp->EffectiveType = GAIA_MULTIPOLYGON;
shp->EffectiveDims = GAIA_XY_Z;
break;
case GAIA_MULTIPOLYGONM:
shp->Shape = GAIA_SHP_POLYGONM;
shp->EffectiveType = GAIA_MULTIPOLYGON;
shp->EffectiveDims = GAIA_XY_M;
break;
case GAIA_MULTIPOLYGONZM:
shp->Shape = GAIA_SHP_POLYGONZ;
shp->EffectiveType = GAIA_MULTIPOLYGON;
shp->EffectiveDims = GAIA_XY_Z_M;
break;
};
shp->flShp = fl_shp;
shp->flShx = fl_shx;
shp->flDbf = fl_dbf;
shp->Dbf = dbf_list;
shp->BufShp = buf_shp;
shp->ShpBfsz = buf_size;
shp->BufDbf = dbf_buf;
shp->DbfHdsz = dbf_size + 1;
shp->DbfReclen = dbf_reclen;
shp->DbfSize = dbf_size;
shp->DbfRecno = 0;
shp->ShpSize = shp_size;
shp->ShxSize = shx_size;
shp->MinX = DBL_MAX;
shp->MinY = DBL_MAX;
shp->MaxX = -DBL_MAX;
shp->MaxY = -DBL_MAX;
shp->Valid = 1;
shp->endian_arch = endian_arch;
return;
unsupported_conversion:
/* illegal charset */
if (shp->LastError)
free (shp->LastError);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
return;
no_file:
/* one of shapefile's files can't be created/opened */
if (shp->LastError)
free (shp->LastError);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
if (buf_shp)
free (buf_shp);
if (fl_shx)
fclose (fl_shx);
if (fl_shp)
fclose (fl_shp);
if (fl_dbf)
fclose (fl_dbf);
return;
}
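/*
/ helper converting a YYYY-MM-DD date into a Julian Day number, using
/ the classic Gregorian-calendar-to-JD algorithm (the same convention
/ adopted by SQLite's julianday); e.g. 2000-01-01 yields 2451544.5
*/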
static int
to_sqlite_julian_date (int year, int month, int day, double *julian)
{
/* trying to convert a 'YYYY-MM-DD' date into a Julian Date [double] */
int Y;
int M;
int D;
int A;
int B;
int X1;
int X2;
if (year < 1900 || year > 2400)
return 0;
if (month < 1 || month > 12)
return 0;
if (day < 1)
return 0;
switch (month)
{
case 2:
if ((year % 4) == 0)
{
if (day > 29)
return 0;
}
else
{
if (day > 28)
return 0;
}
break;
case 4:
case 6:
case 9:
case 11:
if (day > 30)
return 0;
break;
default:
if (day > 31)
return 0;
};
/* computing the Julian date */
Y = year;
M = month;
D = day;
if (M <= 2)
{
Y--;
M += 12;
}
A = Y / 100;
B = 2 - A + (A / 4);
X1 = 36525 * (Y + 4716) / 100;
X2 = 306001 * (M + 1) / 10000;
*julian = (double) (X1 + X2 + D + B - 1524.5);
return 1;
}
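/*
/ DBF attribute parsing: each field carries a one-letter type code -
/ 'N' (numeric), 'F' (float), 'D' (date, converted to a Julian Day
/ unless text_dates is set), 'L' (logical), 'M' (memo, unsupported and
/ always read as NULL); anything else is treated as character/text and
/ passed through iconv for charset conversion
*/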
static int
parseDbfField (unsigned char *buf_dbf, void *iconv_obj, gaiaDbfFieldPtr pFld,
int text_dates)
{
/* parsing a generic DBF field */
unsigned char buf[512];
char utf8buf[2048];
#if !defined(__MINGW32__) && defined(_WIN32)
const char *pBuf;
#else /* not WIN32 */
char *pBuf;
#endif
size_t len;
size_t utf8len;
char *pUtf8buf;
int i;
memcpy (buf, buf_dbf + pFld->Offset + 1, pFld->Length);
buf[pFld->Length] = '\0';
if (*buf == '\0')
gaiaSetNullValue (pFld);
else
{
if (pFld->Type == 'N')
{
/* NUMERIC value */
if (pFld->Decimals > 0 || pFld->Length > 18)
gaiaSetDoubleValue (pFld, atof ((char *) buf));
else
gaiaSetIntValue (pFld, atoll ((char *) buf));
}
else if (pFld->Type == 'M')
{
/* MEMO value - assumed to always be NULL */
gaiaSetNullValue (pFld);
}
else if (pFld->Type == 'F')
{
/* FLOAT value */
gaiaSetDoubleValue (pFld, atof ((char *) buf));
}
else if (pFld->Type == 'D')
{
/* DATE value */
if (text_dates)
{
/* assuming to be plain text */
gaiaSetStrValue (pFld, (char *) buf);
}
else
{
if (strlen ((char *) buf) != 8)
gaiaSetNullValue (pFld);
else
{
/* converting into a Julian Date */
double julian;
char date[5];
int year = 0;
int month = 0;
int day = 0;
date[0] = buf[0];
date[1] = buf[1];
date[2] = buf[2];
date[3] = buf[3];
date[4] = '\0';
year = atoi (date);
date[0] = buf[4];
date[1] = buf[5];
date[2] = '\0';
month = atoi (date);
date[0] = buf[6];
date[1] = buf[7];
date[2] = '\0';
day = atoi (date);
if (to_sqlite_julian_date
(year, month, day, &julian))
gaiaSetDoubleValue (pFld, julian);
else
gaiaSetNullValue (pFld);
}
}
}
else if (pFld->Type == 'L')
{
/* LOGICAL [aka Boolean] value */
if (*buf == '1' || *buf == 't' || *buf == 'T'
|| *buf == 'Y' || *buf == 'y')
gaiaSetIntValue (pFld, 1);
else
gaiaSetIntValue (pFld, 0);
}
else
{
/* CHARACTER [aka String, Text] value */
/* Sandro 2013-01-07
/ fixing an issue reported by Filip Arlet <filip.arlet@gmail.com>
for (i = strlen ((char *) buf) - 1; i > 1; i--)
*/
for (i = strlen ((char *) buf) - 1; i >= 0; i--)
{
/* cleaning up trailing spaces */
if (buf[i] == ' ')
buf[i] = '\0';
else
break;
}
len = strlen ((char *) buf);
utf8len = 2048;
pBuf = (char *) buf;
pUtf8buf = utf8buf;
if (iconv
((iconv_t) (iconv_obj), &pBuf, &len, &pUtf8buf,
&utf8len) == (size_t) (-1))
return 0;
memcpy (buf, utf8buf, 2048 - utf8len);
buf[2048 - utf8len] = '\0';
gaiaSetStrValue (pFld, (char *) buf);
}
}
return 1;
}
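/*
/ (Multi)Polygon reassembly: a SHP POLYGON record stores a flat list of
/ rings; by convention exterior rings are wound clockwise and interior
/ rings (holes) anti-clockwise, so the helpers below collect every ring,
/ infer its role from the winding direction, and then match each hole to
/ the exterior ring containing it (a cheap MBR test first, then a
/ point-on-ring-surface check on two sample points)
*/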
struct shp_ring_item
{
/* a RING item [to be reassembled into a (Multi)Polygon] */
gaiaRingPtr Ring;
int IsExterior;
gaiaRingPtr Mother;
struct shp_ring_item *Next;
};
struct shp_ring_collection
{
/* a collection of RING items */
struct shp_ring_item *First;
struct shp_ring_item *Last;
};
static void
shp_free_rings (struct shp_ring_collection *ringsColl)
{
/* memory cleanup: rings collection */
struct shp_ring_item *p;
struct shp_ring_item *pN;
p = ringsColl->First;
while (p)
{
pN = p->Next;
if (p->Ring)
gaiaFreeRing (p->Ring);
free (p);
p = pN;
}
}
static void
shp_add_ring (struct shp_ring_collection *ringsColl, gaiaRingPtr ring)
{
/* inserting a ring into the rings collection */
struct shp_ring_item *p = malloc (sizeof (struct shp_ring_item));
p->Ring = ring;
gaiaMbrRing (ring);
gaiaClockwise (ring);
/* according to SHP rules, interior/exterior depends on the winding direction */
p->IsExterior = ring->Clockwise;
p->Mother = NULL;
p->Next = NULL;
/* updating the linked list */
if (ringsColl->First == NULL)
ringsColl->First = p;
if (ringsColl->Last != NULL)
ringsColl->Last->Next = p;
ringsColl->Last = p;
}
static int
shp_check_rings (gaiaRingPtr exterior, gaiaRingPtr candidate)
{
/*
/ quickly (and approximately) checks whether the candidate could be an
/ interior Ring contained within the exterior Ring
*/
double z;
double m;
double x0;
double y0;
double x1;
double y1;
int mid;
int ret0;
int ret1;
if (candidate->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (candidate->Coords, 0, &x0, &y0, &z);
}
else if (candidate->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (candidate->Coords, 0, &x0, &y0, &m);
}
else if (candidate->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (candidate->Coords, 0, &x0, &y0, &z, &m);
}
else
{
gaiaGetPoint (candidate->Coords, 0, &x0, &y0);
}
mid = candidate->Points / 2;
if (candidate->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (candidate->Coords, mid, &x1, &y1, &z);
}
else if (candidate->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (candidate->Coords, mid, &x1, &y1, &m);
}
else if (candidate->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (candidate->Coords, mid, &x1, &y1, &z, &m);
}
else
{
gaiaGetPoint (candidate->Coords, mid, &x1, &y1);
}
/* testing if the first point falls on the exterior ring surface */
ret0 = gaiaIsPointOnRingSurface (exterior, x0, y0);
/* testing if the second point falls on the exterior ring surface */
ret1 = gaiaIsPointOnRingSurface (exterior, x1, y1);
if (ret0 || ret1)
return 1;
return 0;
}
static int
shp_mbr_contains (gaiaRingPtr r1, gaiaRingPtr r2)
{
/* checks if the first Ring contains the second one - MBR based */
int ok_1 = 0;
int ok_2 = 0;
int ok_3 = 0;
int ok_4 = 0;
if (r2->MinX >= r1->MinX && r2->MinX <= r1->MaxX)
ok_1 = 1;
if (r2->MaxX >= r1->MinX && r2->MaxX <= r1->MaxX)
ok_2 = 1;
if (r2->MinY >= r1->MinY && r2->MinY <= r1->MaxY)
ok_3 = 1;
if (r2->MaxY >= r1->MinY && r2->MaxY <= r1->MaxY)
ok_4 = 1;
if (ok_1 && ok_2 && ok_3 && ok_4)
return 1;
return 0;
}
static void
shp_arrange_rings (struct shp_ring_collection *ringsColl)
{
/*
/ arranging Rings so as to associate each interior ring
/ with the exterior ring containing it
*/
struct shp_ring_item *pInt;
struct shp_ring_item *pExt;
pExt = ringsColl->First;
while (pExt != NULL)
{
/* looping on Exterior Rings */
if (pExt->IsExterior)
{
pInt = ringsColl->First;
while (pInt != NULL)
{
/* looping on Interior Rings */
if (pInt->IsExterior == 0 && pInt->Mother == NULL
&& shp_mbr_contains (pExt->Ring, pInt->Ring))
{
/* ok, matches */
if (shp_check_rings (pExt->Ring, pInt->Ring))
pInt->Mother = pExt->Ring;
}
pInt = pInt->Next;
}
}
pExt = pExt->Next;
}
pExt = ringsColl->First;
while (pExt != NULL)
{
if (pExt->IsExterior == 0 && pExt->Mother == NULL)
{
/* orphan ring: promoting to Exterior */
pExt->IsExterior = 1;
}
pExt = pExt->Next;
}
}
static void
shp_build_area (struct shp_ring_collection *ringsColl, gaiaGeomCollPtr geom)
{
/* building the final (Multi)Polygon Geometry */
gaiaPolygonPtr polyg;
struct shp_ring_item *pExt;
struct shp_ring_item *pInt;
pExt = ringsColl->First;
while (pExt != NULL)
{
if (pExt->IsExterior)
{
/* creating a new Polygon */
polyg = gaiaInsertPolygonInGeomColl (geom, pExt->Ring);
pInt = ringsColl->First;
while (pInt != NULL)
{
if (pExt->Ring == pInt->Mother)
{
/* adding an interior ring to current POLYGON */
gaiaAddRingToPolyg (polyg, pInt->Ring);
/* releasing Ring ownership */
pInt->Ring = NULL;
}
pInt = pInt->Next;
}
/* releasing Ring ownership */
pExt->Ring = NULL;
}
pExt = pExt->Next;
}
}
GAIAGEO_DECLARE int
gaiaReadShpEntity (gaiaShapefilePtr shp, int current_row, int srid)
{
return gaiaReadShpEntity_ex (shp, current_row, srid, 0);
}
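/*
/ SHP/SHX record layout reminder: the SHX row for entity N holds offset
/ and length (both in 16-bit words) of the matching SHP record; each SHP
/ record starts with a big-endian header (record number + content
/ length) followed by a little-endian shape type and the geometry
/ payload, while the DBF row is addressed as header-size + N * reclen
/
/ minimal read-loop sketch (illustrative only; it assumes the usual
/ gaiaAllocShapefile / gaiaFreeShapefile helpers and an arbitrary SRID):
/
/    gaiaShapefilePtr shp = gaiaAllocShapefile ();
/    gaiaOpenShpRead (shp, "./roads", "CP1252", "UTF-8");
/    if (shp->Valid)
/      {
/          int row = 0;
/          while (gaiaReadShpEntity (shp, row, 4326))
/              row++;    /* shp->Dbf->Geometry holds the parsed geometry */
/      }
/    gaiaFreeShapefile (shp);
*/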
GAIAGEO_DECLARE int
gaiaReadShpEntity_ex (gaiaShapefilePtr shp, int current_row, int srid,
int text_dates)
{
/* trying to read an entity from shapefile */
unsigned char buf[512];
int len;
int rd;
int skpos;
int offset;
int off_shp;
int sz;
int shape;
double x;
double y;
double z;
double m;
int points;
int n;
int n1;
int base;
int baseZ;
int baseM;
int start;
int end;
int iv;
int ind;
int max_size;
int min_size;
int hasM;
char errMsg[1024];
gaiaGeomCollPtr geom = NULL;
gaiaLinestringPtr line = NULL;
gaiaRingPtr ring = NULL;
gaiaDbfFieldPtr pFld;
struct shp_ring_collection ringsColl;
/* initializing the RING collection */
ringsColl.First = NULL;
ringsColl.Last = NULL;
/* positioning and reading the SHX file */
offset = 100 + (current_row * 8); /* 100 bytes for the header + current row displacement; each SHX row = 8 bytes */
skpos = fseek (shp->flShx, offset, SEEK_SET);
if (skpos != 0)
goto eof;
rd = fread (buf, sizeof (unsigned char), 8, shp->flShx);
if (rd != 8)
goto eof;
off_shp = gaiaImport32 (buf, GAIA_BIG_ENDIAN, shp->endian_arch);
/* positioning and reading the DBF file */
offset = shp->DbfHdsz + (current_row * shp->DbfReclen);
skpos = fseek (shp->flDbf, offset, SEEK_SET);
if (skpos != 0)
goto error;
rd = fread (shp->BufDbf, sizeof (unsigned char), shp->DbfReclen,
shp->flDbf);
if (rd != shp->DbfReclen)
goto error;
/* positioning and reading corresponding SHP entity - geometry */
offset = off_shp * 2;
skpos = fseek (shp->flShp, offset, SEEK_SET);
if (skpos != 0)
goto error;
rd = fread (buf, sizeof (unsigned char), 12, shp->flShp);
if (rd != 12)
goto error;
sz = gaiaImport32 (buf + 4, GAIA_BIG_ENDIAN, shp->endian_arch);
shape = gaiaImport32 (buf + 8, GAIA_LITTLE_ENDIAN, shp->endian_arch);
if (shape == GAIA_SHP_NULL)
{
/* handling a NULL shape */
goto null_shape;
}
else if (shape != shp->Shape)
goto error;
if ((sz * 2) > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger buffer */
free (shp->BufShp);
shp->ShpBfsz = sz * 2;
shp->BufShp = malloc (sizeof (unsigned char) * shp->ShpBfsz);
}
if (shape == GAIA_SHP_POINT)
{
/* shape point */
rd = fread (shp->BufShp, sizeof (unsigned char), 16, shp->flShp);
if (rd != 16)
goto error;
x = gaiaImport64 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + 8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (shp->EffectiveDims == GAIA_XY_Z)
{
geom = gaiaAllocGeomCollXYZ ();
gaiaAddPointToGeomCollXYZ (geom, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
geom = gaiaAllocGeomCollXYM ();
gaiaAddPointToGeomCollXYM (geom, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
geom = gaiaAllocGeomCollXYZM ();
gaiaAddPointToGeomCollXYZM (geom, x, y, 0.0, 0.0);
}
else
{
geom = gaiaAllocGeomColl ();
gaiaAddPointToGeomColl (geom, x, y);
}
geom->DeclaredType = GAIA_POINT;
geom->Srid = srid;
}
if (shape == GAIA_SHP_POINTZ)
{
/* shape point Z */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
{
/* required by some buggy SHP writers (e.g. the GDAL/OGR ones) */
if (rd != 24)
goto error;
}
x = gaiaImport64 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + 8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
z = gaiaImport64 (shp->BufShp + 16, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (rd == 24)
m = 0.0;
else
m = gaiaImport64 (shp->BufShp + 24, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (shp->EffectiveDims == GAIA_XY_Z)
{
geom = gaiaAllocGeomCollXYZ ();
gaiaAddPointToGeomCollXYZ (geom, x, y, z);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
geom = gaiaAllocGeomCollXYM ();
gaiaAddPointToGeomCollXYM (geom, x, y, m);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
geom = gaiaAllocGeomCollXYZM ();
gaiaAddPointToGeomCollXYZM (geom, x, y, z, m);
}
else
{
geom = gaiaAllocGeomColl ();
gaiaAddPointToGeomColl (geom, x, y);
}
geom->DeclaredType = GAIA_POINT;
geom->Srid = srid;
}
if (shape == GAIA_SHP_POINTM)
{
/* shape point M */
rd = fread (shp->BufShp, sizeof (unsigned char), 24, shp->flShp);
if (rd != 24)
goto error;
x = gaiaImport64 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + 8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
m = gaiaImport64 (shp->BufShp + 16, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (shp->EffectiveDims == GAIA_XY_Z)
{
geom = gaiaAllocGeomCollXYZ ();
gaiaAddPointToGeomCollXYZ (geom, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
geom = gaiaAllocGeomCollXYM ();
gaiaAddPointToGeomCollXYM (geom, x, y, m);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
geom = gaiaAllocGeomCollXYZM ();
gaiaAddPointToGeomCollXYZM (geom, x, y, 0.0, m);
}
else
{
geom = gaiaAllocGeomColl ();
gaiaAddPointToGeomColl (geom, x, y);
}
geom->DeclaredType = GAIA_POINT;
geom->Srid = srid;
}
if (shape == GAIA_SHP_POLYLINE)
{
/* shape polyline */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
base = 8 + (n * 4);
start = 0;
for (ind = 0; ind < n; ind++)
{
if (ind < (n - 1))
end =
gaiaImport32 (shp->BufShp + 8 + ((ind + 1) * 4),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
end = n1;
points = end - start;
if (shp->EffectiveDims == GAIA_XY_Z)
line = gaiaAllocLinestringXYZ (points);
else if (shp->EffectiveDims == GAIA_XY_M)
line = gaiaAllocLinestringXYM (points);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
line = gaiaAllocLinestringXYZM (points);
else
line = gaiaAllocLinestring (points);
points = 0;
for (iv = start; iv < end; iv++)
{
x = gaiaImport64 (shp->BufShp + base + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + base + (iv * 16) +
8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (shp->EffectiveDims == GAIA_XY_Z)
{
gaiaSetPointXYZ (line->Coords, points, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
gaiaSetPointXYM (line->Coords, points, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
gaiaSetPointXYZM (line->Coords, points, x, y,
0.0, 0.0);
}
else
{
gaiaSetPoint (line->Coords, points, x, y);
}
start++;
points++;
}
if (!geom)
{
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
if (shp->EffectiveType == GAIA_LINESTRING)
geom->DeclaredType = GAIA_LINESTRING;
else
geom->DeclaredType = GAIA_MULTILINESTRING;
geom->Srid = srid;
}
gaiaInsertLinestringInGeomColl (geom, line);
}
}
if (shape == GAIA_SHP_POLYLINEZ)
{
/* shape polyline Z */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
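/*
/ in the Z flavour of PolyLine (and Polygon / MultiPoint) the trailing
/ M range and M array are optional: their presence can only be inferred
/ by comparing the declared record size against the two admissible
/ sizes computed below (with and without the M block)
*/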
hasM = 0;
max_size = 38 + (2 * n) + (n1 * 16); /* size [in 16 bits words !!!] ZM */
min_size = 30 + (2 * n) + (n1 * 12); /* size [in 16 bits words !!!] Z-only */
if (sz < min_size)
goto error;
if (sz == max_size)
hasM = 1;
base = 8 + (n * 4);
baseZ = base + (n1 * 16) + 16;
baseM = baseZ + (n1 * 8) + 16;
start = 0;
for (ind = 0; ind < n; ind++)
{
if (ind < (n - 1))
end =
gaiaImport32 (shp->BufShp + 8 + ((ind + 1) * 4),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
end = n1;
points = end - start;
if (shp->EffectiveDims == GAIA_XY_Z)
line = gaiaAllocLinestringXYZ (points);
else if (shp->EffectiveDims == GAIA_XY_M)
line = gaiaAllocLinestringXYM (points);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
line = gaiaAllocLinestringXYZM (points);
else
line = gaiaAllocLinestring (points);
points = 0;
for (iv = start; iv < end; iv++)
{
x = gaiaImport64 (shp->BufShp + base + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + base + (iv * 16) +
8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
z = gaiaImport64 (shp->BufShp + baseZ + (iv * 8),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
if (hasM)
m = gaiaImport64 (shp->BufShp + baseM +
(iv * 8), GAIA_LITTLE_ENDIAN,
shp->endian_arch);
else
m = 0.0;
if (m < SHAPEFILE_NO_DATA)
m = 0.0;
if (shp->EffectiveDims == GAIA_XY_Z)
{
gaiaSetPointXYZ (line->Coords, points, x, y, z);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
gaiaSetPointXYM (line->Coords, points, x, y, m);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
gaiaSetPointXYZM (line->Coords, points, x, y, z, m);
}
else
{
gaiaSetPoint (line->Coords, points, x, y);
}
start++;
points++;
}
if (!geom)
{
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
if (shp->EffectiveType == GAIA_LINESTRING)
geom->DeclaredType = GAIA_LINESTRING;
else
geom->DeclaredType = GAIA_MULTILINESTRING;
geom->Srid = srid;
}
gaiaInsertLinestringInGeomColl (geom, line);
}
}
if (shape == GAIA_SHP_POLYLINEM)
{
/* shape polyline M */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
hasM = 0;
max_size = 30 + (2 * n) + (n1 * 12); /* size [in 16 bits words !!!] M */
min_size = 22 + (2 * n) + (n1 * 8); /* size [in 16 bits words !!!] no-M */
if (sz < min_size)
goto error;
if (sz == max_size)
hasM = 1;
base = 8 + (n * 4);
baseM = base + (n1 * 16) + 16;
start = 0;
for (ind = 0; ind < n; ind++)
{
if (ind < (n - 1))
end =
gaiaImport32 (shp->BufShp + 8 + ((ind + 1) * 4),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
end = n1;
points = end - start;
if (shp->EffectiveDims == GAIA_XY_Z)
line = gaiaAllocLinestringXYZ (points);
else if (shp->EffectiveDims == GAIA_XY_M)
line = gaiaAllocLinestringXYM (points);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
line = gaiaAllocLinestringXYZM (points);
else
line = gaiaAllocLinestring (points);
points = 0;
for (iv = start; iv < end; iv++)
{
x = gaiaImport64 (shp->BufShp + base + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + base + (iv * 16) +
8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (hasM)
m = gaiaImport64 (shp->BufShp + baseM +
(iv * 8), GAIA_LITTLE_ENDIAN,
shp->endian_arch);
else
m = 0.0;
if (m < SHAPEFILE_NO_DATA)
m = 0.0;
if (shp->EffectiveDims == GAIA_XY_Z)
{
gaiaSetPointXYZ (line->Coords, points, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
gaiaSetPointXYM (line->Coords, points, x, y, m);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
gaiaSetPointXYZM (line->Coords, points, x, y,
0.0, m);
}
else
{
gaiaSetPoint (line->Coords, points, x, y);
}
start++;
points++;
}
if (!geom)
{
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
if (shp->EffectiveType == GAIA_LINESTRING)
geom->DeclaredType = GAIA_LINESTRING;
else
geom->DeclaredType = GAIA_MULTILINESTRING;
geom->Srid = srid;
}
gaiaInsertLinestringInGeomColl (geom, line);
}
}
if (shape == GAIA_SHP_POLYGON)
{
/* shape polygon */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
base = 8 + (n * 4);
start = 0;
for (ind = 0; ind < n; ind++)
{
if (ind < (n - 1))
end =
gaiaImport32 (shp->BufShp + 8 + ((ind + 1) * 4),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
end = n1;
points = end - start;
if (shp->EffectiveDims == GAIA_XY_Z)
ring = gaiaAllocRingXYZ (points);
else if (shp->EffectiveDims == GAIA_XY_M)
ring = gaiaAllocRingXYM (points);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
ring = gaiaAllocRingXYZM (points);
else
ring = gaiaAllocRing (points);
points = 0;
for (iv = start; iv < end; iv++)
{
x = gaiaImport64 (shp->BufShp + base + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + base + (iv * 16) +
8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (shp->EffectiveDims == GAIA_XY_Z)
{
gaiaSetPointXYZ (ring->Coords, points, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
gaiaSetPointXYM (ring->Coords, points, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
gaiaSetPointXYZM (ring->Coords, points, x, y,
0.0, 0.0);
}
else
{
gaiaSetPoint (ring->Coords, points, x, y);
}
start++;
points++;
}
shp_add_ring (&ringsColl, ring);
}
shp_arrange_rings (&ringsColl);
/* allocating the final geometry */
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
if (shp->EffectiveType == GAIA_POLYGON)
geom->DeclaredType = GAIA_POLYGON;
else
geom->DeclaredType = GAIA_MULTIPOLYGON;
geom->Srid = srid;
shp_build_area (&ringsColl, geom);
}
if (shape == GAIA_SHP_POLYGONZ)
{
/* shape polygon Z */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
hasM = 0;
max_size = 38 + (2 * n) + (n1 * 16); /* size [in 16 bits words !!!] ZM */
min_size = 30 + (2 * n) + (n1 * 12); /* size [in 16 bits words !!!] Z-only */
if (sz < min_size)
goto error;
if (sz == max_size)
hasM = 1;
base = 8 + (n * 4);
baseZ = base + (n1 * 16) + 16;
baseM = baseZ + (n1 * 8) + 16;
start = 0;
for (ind = 0; ind < n; ind++)
{
if (ind < (n - 1))
end =
gaiaImport32 (shp->BufShp + 8 + ((ind + 1) * 4),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
end = n1;
points = end - start;
if (shp->EffectiveDims == GAIA_XY_Z)
ring = gaiaAllocRingXYZ (points);
else if (shp->EffectiveDims == GAIA_XY_M)
ring = gaiaAllocRingXYM (points);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
ring = gaiaAllocRingXYZM (points);
else
ring = gaiaAllocRing (points);
points = 0;
for (iv = start; iv < end; iv++)
{
x = gaiaImport64 (shp->BufShp + base + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + base + (iv * 16) +
8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
z = gaiaImport64 (shp->BufShp + baseZ + (iv * 8),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
if (hasM)
m = gaiaImport64 (shp->BufShp + baseM +
(iv * 8), GAIA_LITTLE_ENDIAN,
shp->endian_arch);
else
m = 0.0;
if (m < SHAPEFILE_NO_DATA)
m = 0.0;
if (shp->EffectiveDims == GAIA_XY_Z)
{
gaiaSetPointXYZ (ring->Coords, points, x, y, z);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
gaiaSetPointXYM (ring->Coords, points, x, y, m);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
gaiaSetPointXYZM (ring->Coords, points, x, y, z, m);
}
else
{
gaiaSetPoint (ring->Coords, points, x, y);
}
start++;
points++;
}
shp_add_ring (&ringsColl, ring);
}
shp_arrange_rings (&ringsColl);
/* allocating the final geometry */
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
if (shp->EffectiveType == GAIA_POLYGON)
geom->DeclaredType = GAIA_POLYGON;
else
geom->DeclaredType = GAIA_MULTIPOLYGON;
geom->Srid = srid;
shp_build_area (&ringsColl, geom);
}
if (shape == GAIA_SHP_POLYGONM)
{
/* shape polygon M */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
hasM = 0;
max_size = 30 + (2 * n) + (n1 * 12); /* size [in 16 bits words !!!] M */
min_size = 22 + (2 * n) + (n1 * 8); /* size [in 16 bits words !!!] no-M */
if (sz < min_size)
goto error;
if (sz == max_size)
hasM = 1;
base = 8 + (n * 4);
baseM = base + (n1 * 16) + 16;
start = 0;
for (ind = 0; ind < n; ind++)
{
if (ind < (n - 1))
end =
gaiaImport32 (shp->BufShp + 8 + ((ind + 1) * 4),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
end = n1;
points = end - start;
if (shp->EffectiveDims == GAIA_XY_Z)
ring = gaiaAllocRingXYZ (points);
else if (shp->EffectiveDims == GAIA_XY_M)
ring = gaiaAllocRingXYM (points);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
ring = gaiaAllocRingXYZM (points);
else
ring = gaiaAllocRing (points);
points = 0;
for (iv = start; iv < end; iv++)
{
x = gaiaImport64 (shp->BufShp + base + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + base + (iv * 16) +
8, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (hasM)
    m = gaiaImport64 (shp->BufShp + baseM +
                      (iv * 8), GAIA_LITTLE_ENDIAN,
                      shp->endian_arch);
else
    m = 0.0;
if (m < SHAPEFILE_NO_DATA)
m = 0.0;
if (shp->EffectiveDims == GAIA_XY_Z)
{
gaiaSetPointXYZ (ring->Coords, points, x, y, 0.0);
}
else if (shp->EffectiveDims == GAIA_XY_M)
{
gaiaSetPointXYM (ring->Coords, points, x, y, m);
}
else if (shp->EffectiveDims == GAIA_XY_Z_M)
{
gaiaSetPointXYZM (ring->Coords, points, x, y,
0.0, m);
}
else
{
gaiaSetPoint (ring->Coords, points, x, y);
}
start++;
points++;
}
shp_add_ring (&ringsColl, ring);
}
shp_arrange_rings (&ringsColl);
/* allocating the final geometry */
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
if (shp->EffectiveType == GAIA_POLYGON)
geom->DeclaredType = GAIA_POLYGON;
else
geom->DeclaredType = GAIA_MULTIPOLYGON;
geom->Srid = srid;
shp_build_area (&ringsColl, geom);
}
if (shape == GAIA_SHP_MULTIPOINT)
{
/* shape multipoint */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
geom->DeclaredType = GAIA_MULTIPOINT;
geom->Srid = srid;
for (iv = 0; iv < n; iv++)
{
x = gaiaImport64 (shp->BufShp + 4 + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + 4 + (iv * 16) + 8,
GAIA_LITTLE_ENDIAN, shp->endian_arch);
if (shp->EffectiveDims == GAIA_XY_Z)
gaiaAddPointToGeomCollXYZ (geom, x, y, 0.0);
else if (shp->EffectiveDims == GAIA_XY_M)
gaiaAddPointToGeomCollXYM (geom, x, y, 0.0);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
gaiaAddPointToGeomCollXYZM (geom, x, y, 0.0, 0.0);
else
gaiaAddPointToGeomColl (geom, x, y);
}
}
if (shape == GAIA_SHP_MULTIPOINTZ)
{
/* shape multipoint Z */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
hasM = 0;
max_size = 36 + (n * 16); /* size [in 16 bits words !!!] ZM */
min_size = 28 + (n * 12); /* size [in 16 bits words !!!] Z-only */
if (sz < min_size)
goto error;
if (sz == max_size)
hasM = 1;
baseZ = 4 + (n * 16) + 16;
baseM = baseZ + (n * 8) + 16;
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
geom->DeclaredType = GAIA_MULTIPOINT;
geom->Srid = srid;
for (iv = 0; iv < n; iv++)
{
x = gaiaImport64 (shp->BufShp + 4 + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + 4 + (iv * 16) + 8,
GAIA_LITTLE_ENDIAN, shp->endian_arch);
z = gaiaImport64 (shp->BufShp + baseZ + (iv * 8),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
if (hasM)
m = gaiaImport64 (shp->BufShp + baseM + (iv * 8),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
m = 0.0;
if (m < SHAPEFILE_NO_DATA)
m = 0.0;
if (shp->EffectiveDims == GAIA_XY_Z)
gaiaAddPointToGeomCollXYZ (geom, x, y, z);
else if (shp->EffectiveDims == GAIA_XY_M)
gaiaAddPointToGeomCollXYM (geom, x, y, m);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
gaiaAddPointToGeomCollXYZM (geom, x, y, z, m);
else
gaiaAddPointToGeomColl (geom, x, y);
}
}
if (shape == GAIA_SHP_MULTIPOINTM)
{
/* shape multipoint M */
rd = fread (shp->BufShp, sizeof (unsigned char), 32, shp->flShp);
if (rd != 32)
goto error;
rd = fread (shp->BufShp, sizeof (unsigned char), (sz * 2) - 36,
shp->flShp);
if (rd != (sz * 2) - 36)
goto error;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN, shp->endian_arch);
hasM = 0;
max_size = 28 + (n * 12); /* size [in 16 bits words !!!] M */
min_size = 20 + (n * 8); /* size [in 16 bits words !!!] no-M */
if (sz < min_size)
goto error;
if (sz == max_size)
hasM = 1;
baseM = 4 + (n * 16) + 16;
if (shp->EffectiveDims == GAIA_XY_Z)
geom = gaiaAllocGeomCollXYZ ();
else if (shp->EffectiveDims == GAIA_XY_M)
geom = gaiaAllocGeomCollXYM ();
else if (shp->EffectiveDims == GAIA_XY_Z_M)
geom = gaiaAllocGeomCollXYZM ();
else
geom = gaiaAllocGeomColl ();
geom->DeclaredType = GAIA_MULTIPOINT;
geom->Srid = srid;
for (iv = 0; iv < n; iv++)
{
x = gaiaImport64 (shp->BufShp + 4 + (iv * 16),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
y = gaiaImport64 (shp->BufShp + 4 + (iv * 16) + 8,
GAIA_LITTLE_ENDIAN, shp->endian_arch);
if (hasM)
m = gaiaImport64 (shp->BufShp + baseM + (iv * 8),
GAIA_LITTLE_ENDIAN, shp->endian_arch);
else
m = 0.0;
if (m < SHAPEFILE_NO_DATA)
m = 0.0;
if (shp->EffectiveDims == GAIA_XY_Z)
gaiaAddPointToGeomCollXYZ (geom, x, y, 0.0);
else if (shp->EffectiveDims == GAIA_XY_M)
gaiaAddPointToGeomCollXYM (geom, x, y, m);
else if (shp->EffectiveDims == GAIA_XY_Z_M)
gaiaAddPointToGeomCollXYZM (geom, x, y, 0.0, m);
else
gaiaAddPointToGeomColl (geom, x, y);
}
}
/* setting up the current SHP ENTITY */
null_shape:
gaiaResetDbfEntity (shp->Dbf);
shp->Dbf->RowId = current_row;
shp->Dbf->Geometry = geom;
/* fetching the DBF values */
pFld = shp->Dbf->First;
while (pFld)
{
if (!parseDbfField (shp->BufDbf, shp->IconvObj, pFld, text_dates))
goto conversion_error;
pFld = pFld->Next;
}
if (shp->LastError)
free (shp->LastError);
shp->LastError = NULL;
shp_free_rings (&ringsColl);
return 1;
eof:
if (shp->LastError)
free (shp->LastError);
shp->LastError = NULL;
shp_free_rings (&ringsColl);
return 0;
error:
if (shp->LastError)
free (shp->LastError);
sprintf (errMsg, "'%s' is corrupted / has invalid format", shp->Path);
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
shp_free_rings (&ringsColl);
return 0;
conversion_error:
if (shp->LastError)
free (shp->LastError);
sprintf (errMsg, "Invalid character sequence");
len = strlen (errMsg);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, errMsg);
shp_free_rings (&ringsColl);
return 0;
}
static void
gaiaSaneClockwise (gaiaPolygonPtr polyg)
{
/*
/ when exporting POLYGONs to SHAPEFILE, we must guarantee that:
/ - every EXTERIOR RING must be clockwise
/ - every INTERIOR RING must be anti-clockwise
/
/ this function checks for the above conditions,
/ and if needed inverts the rings
*/
int ib;
int iv;
int iv2;
double x;
double y;
double z;
double m;
gaiaRingPtr new_ring;
gaiaRingPtr ring = polyg->Exterior;
gaiaClockwise (ring);
if (!(ring->Clockwise))
{
/* exterior ring needs inversion */
if (ring->DimensionModel == GAIA_XY_Z)
new_ring = gaiaAllocRingXYZ (ring->Points);
else if (ring->DimensionModel == GAIA_XY_M)
new_ring = gaiaAllocRingXYM (ring->Points);
else if (ring->DimensionModel == GAIA_XY_Z_M)
new_ring = gaiaAllocRingXYZM (ring->Points);
else
new_ring = gaiaAllocRing (ring->Points);
iv2 = 0;
for (iv = ring->Points - 1; iv >= 0; iv--)
{
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv, &x, &y, &z);
gaiaSetPointXYZ (new_ring->Coords, iv2, x, y, z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv, &x, &y, &m);
gaiaSetPointXYM (new_ring->Coords, iv2, x, y, m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv, &x, &y, &z, &m);
gaiaSetPointXYZM (new_ring->Coords, iv2, x, y, z, m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
gaiaSetPoint (new_ring->Coords, iv2, x, y);
}
iv2++;
}
polyg->Exterior = new_ring;
gaiaFreeRing (ring);
}
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
ring = polyg->Interiors + ib;
gaiaClockwise (ring);
if (ring->Clockwise)
{
/* interior ring needs inversion */
if (ring->DimensionModel == GAIA_XY_Z)
new_ring = gaiaAllocRingXYZ (ring->Points);
else if (ring->DimensionModel == GAIA_XY_M)
new_ring = gaiaAllocRingXYM (ring->Points);
else if (ring->DimensionModel == GAIA_XY_Z_M)
new_ring = gaiaAllocRingXYZM (ring->Points);
else
new_ring = gaiaAllocRing (ring->Points);
iv2 = 0;
for (iv = ring->Points - 1; iv >= 0; iv--)
{
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv, &x, &y, &z);
gaiaSetPointXYZ (new_ring->Coords, iv2, x, y, z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv, &x, &y, &m);
gaiaSetPointXYM (new_ring->Coords, iv2, x, y, m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv, &x, &y, &z, &m);
gaiaSetPointXYZM (new_ring->Coords, iv2, x, y,
z, m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
gaiaSetPoint (new_ring->Coords, iv2, x, y);
}
iv2++;
}
for (iv = 0; iv < ring->Points; iv++)
{
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (new_ring->Coords, iv, &x, &y, &z);
gaiaSetPointXYZ (ring->Coords, iv, x, y, z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (new_ring->Coords, iv, &x, &y, &m);
gaiaSetPointXYM (ring->Coords, iv, x, y, m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (new_ring->Coords, iv, &x, &y,
&z, &m);
gaiaSetPointXYZM (ring->Coords, iv, x, y, z, m);
}
else
{
gaiaGetPoint (new_ring->Coords, iv, &x, &y);
gaiaSetPoint (ring->Coords, iv, x, y);
}
}
gaiaFreeRing (new_ring);
}
}
}
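/*
/ writing an entity appends three items in one shot: the fixed-length
/ DBF record (first byte = deletion marker, then the field images), an
/ 8 bytes SHX row (offset + length of the SHP record, in 16-bit words)
/ and the SHP record itself; the running ShpSize / ShxSize / DbfRecno
/ counters are what the final header-flushing step will need
*/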
GAIAGEO_DECLARE int
gaiaWriteShpEntity (gaiaShapefilePtr shp, gaiaDbfListPtr entity)
{
/* trying to write an entity into shapefile */
char dummy[128];
char fmt[16];
int endian_arch = shp->endian_arch;
gaiaDbfFieldPtr fld;
int iv;
int tot_ln;
int tot_v;
int tot_pts;
int this_size;
int ix;
double x;
double y;
double z;
double m;
int hasM;
double minZ;
double maxZ;
double minM;
double maxM;
#if !defined(__MINGW32__) && defined(_WIN32)
const char *pBuf;
#else /* not WIN32 */
char *pBuf;
#endif
size_t len;
size_t utf8len;
char *dynbuf;
char *pUtf8buf;
char utf8buf[2048];
/* writing the DBF record */
memset (shp->BufDbf, '\0', shp->DbfReclen);
*(shp->BufDbf) = ' ';	/* in a DBF row the first byte marks validity or deletion */
fld = entity->First;
while (fld)
{
/* transferring field values */
switch (fld->Type)
{
case 'L':
if (!(fld->Value))
*(shp->BufDbf + fld->Offset + 1) = '?';
else if (fld->Value->Type != GAIA_INT_VALUE)
*(shp->BufDbf + fld->Offset + 1) = '?';
else
{
if (fld->Value->IntValue == 0)
*(shp->BufDbf + fld->Offset + 1) = 'N';
else
*(shp->BufDbf + fld->Offset + 1) = 'Y';
}
break;
case 'D':
memset (shp->BufDbf + fld->Offset + 1, '0', 8);
if (fld->Value)
{
if (fld->Value->Type == GAIA_TEXT_VALUE)
{
if (strlen (fld->Value->TxtValue) == 8)
memcpy (shp->BufDbf + fld->Offset + 1,
fld->Value->TxtValue, 8);
}
}
break;
case 'C':
memset (shp->BufDbf + fld->Offset + 1, ' ', fld->Length);
if (fld->Value)
{
if (fld->Value->Type == GAIA_TEXT_VALUE)
{
len = strlen (fld->Value->TxtValue);
dynbuf = malloc (len + 1);
strcpy (dynbuf, fld->Value->TxtValue);
if (len > 512)
{
dynbuf[512] = '\0';
len = strlen (dynbuf);
}
utf8len = 2048;
pBuf = dynbuf;
pUtf8buf = utf8buf;
if (iconv
((iconv_t) (shp->IconvObj), &pBuf, &len,
&pUtf8buf, &utf8len) == (size_t) (-1))
{
free (dynbuf);
goto conversion_error;
}
memcpy (dynbuf, utf8buf, 2048 - utf8len);
dynbuf[2048 - utf8len] = '\0';
if (strlen (dynbuf) < fld->Length)
memcpy (shp->BufDbf + fld->Offset + 1, dynbuf,
strlen (dynbuf));
else
memcpy (shp->BufDbf + fld->Offset + 1, dynbuf,
fld->Length);
free (dynbuf);
}
}
break;
case 'N':
memset (shp->BufDbf + fld->Offset + 1, '\0', fld->Length);
if (fld->Value)
{
if (fld->Value->Type == GAIA_INT_VALUE)
{
sprintf (dummy, FRMT64, fld->Value->IntValue);
if (strlen (dummy) <= fld->Length)
memcpy (shp->BufDbf + fld->Offset + 1,
dummy, strlen (dummy));
}
if (fld->Value->Type == GAIA_DOUBLE_VALUE)
{
sprintf (fmt, "%%1.%df", fld->Decimals);
sprintf (dummy, fmt, fld->Value->DblValue);
if (strlen (dummy) <= fld->Length)
memcpy (shp->BufDbf + fld->Offset + 1,
dummy, strlen (dummy));
}
}
break;
};
fld = fld->Next;
}
if (!(entity->Geometry))
{
/* exporting a NULL Shape */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, 2, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4; /* updating current SHX file position [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, 2, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_NULL, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = NULL */
fwrite (shp->BufShp, 1, 12, shp->flShp);
(shp->ShpSize) += 6; /* updating current SHP file position [in 16 bits words !!!] */
}
else
{
/* updates the shapefile main MBR-BBOX */
gaiaMbrGeometry (entity->Geometry);
if (entity->Geometry->MinX < shp->MinX)
shp->MinX = entity->Geometry->MinX;
if (entity->Geometry->MaxX > shp->MaxX)
shp->MaxX = entity->Geometry->MaxX;
if (entity->Geometry->MinY < shp->MinY)
shp->MinY = entity->Geometry->MinY;
if (entity->Geometry->MaxY > shp->MaxY)
shp->MaxY = entity->Geometry->MaxY;
if (shp->Shape == GAIA_SHP_POINT)
{
/* this one is expected to be a POINT */
gaiaPointPtr pt = entity->Geometry->FirstPoint;
if (!pt)
{
strcpy (dummy,
"a POINT is expected, but there is no POINT in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
/* inserting POINT entity into SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, 10, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4; /* updating current SHX file position [in 16 bits words !!!] */
/* inserting POINT into SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, 10, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POINT, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POINT */
gaiaExport64 (shp->BufShp + 12, pt->X, GAIA_LITTLE_ENDIAN, endian_arch); /* exports X coordinate */
gaiaExport64 (shp->BufShp + 20, pt->Y, GAIA_LITTLE_ENDIAN, endian_arch); /* exports Y coordinate */
fwrite (shp->BufShp, 1, 28, shp->flShp);
(shp->ShpSize) += 14; /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_POINTZ)
{
/* this one is expected to be a POINT Z */
gaiaPointPtr pt = entity->Geometry->FirstPoint;
if (!pt)
{
strcpy (dummy,
"a POINT is expected, but there is no POINT in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
/* inserting POINT Z entity into SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, 18, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4; /* updating current SHX file position [in 16 bits words !!!] */
/* inserting POINT into SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, 18, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POINTZ, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POINT Z */
gaiaExport64 (shp->BufShp + 12, pt->X, GAIA_LITTLE_ENDIAN, endian_arch); /* exports X coordinate */
gaiaExport64 (shp->BufShp + 20, pt->Y, GAIA_LITTLE_ENDIAN, endian_arch); /* exports Y coordinate */
gaiaExport64 (shp->BufShp + 28, pt->Z, GAIA_LITTLE_ENDIAN, endian_arch); /* exports Z coordinate */
gaiaExport64 (shp->BufShp + 36, pt->M, GAIA_LITTLE_ENDIAN, endian_arch); /* exports M coordinate */
fwrite (shp->BufShp, 1, 44, shp->flShp);
(shp->ShpSize) += 22; /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_POINTM)
{
/* this one is expected to be a POINT M */
gaiaPointPtr pt = entity->Geometry->FirstPoint;
if (!pt)
{
strcpy (dummy,
"a POINT is expected, but there is no POINT in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
/* inserting POINT entity into SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, 14, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4; /* updating current SHX file position [in 16 bits words !!!] */
/* inserting POINT into SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, 14, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POINTM, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POINT M */
gaiaExport64 (shp->BufShp + 12, pt->X, GAIA_LITTLE_ENDIAN, endian_arch); /* exports X coordinate */
gaiaExport64 (shp->BufShp + 20, pt->Y, GAIA_LITTLE_ENDIAN, endian_arch); /* exports Y coordinate */
gaiaExport64 (shp->BufShp + 28, pt->M, GAIA_LITTLE_ENDIAN, endian_arch);	/* exports M coordinate */
fwrite (shp->BufShp, 1, 36, shp->flShp);
(shp->ShpSize) += 18; /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_POLYLINE)
{
/* this one is expected to be a LINESTRING / MULTILINESTRING */
gaiaLinestringPtr line;
tot_ln = 0;
tot_v = 0;
line = entity->Geometry->FirstLinestring;
while (line)
{
/* computes # lines and total # points */
tot_v += line->Points;
tot_ln++;
line = line->Next;
}
if (!tot_ln)
{
strcpy (dummy,
"a LINESTRING is expected, but there is no LINESTRING in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
this_size = 22 + (2 * tot_ln) + (tot_v * 8); /* size [in 16 bits words !!!] for this SHP entity */
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting LINESTRING or MULTILINESTRING in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting LINESTRING or MULTILINESTRING in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POLYLINE, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POLYLINE */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_ln, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # lines in this polyline */
gaiaExport32 (shp->BufShp + 48, tot_v, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
tot_v = 0; /* resets points counter */
ix = 52; /* sets current buffer offset */
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports start point index for each line */
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += line->Points;
ix += 4;
line = line->Next;
}
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports points for each line */
for (iv = 0; iv < line->Points; iv++)
{
/* exports a POINT [x,y] */
if (line->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (line->Coords, iv, &x, &y,
&z);
}
else if (line->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (line->Coords, iv, &x, &y,
&m);
}
else if (line->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (line->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (line->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
line = line->Next;
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2); /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_POLYLINEZ)
{
/* this one is expected to be a LINESTRING / MULTILINESTRING Z */
gaiaLinestringPtr line;
gaiaZRangeGeometry (entity->Geometry, &minZ, &maxZ);
gaiaMRangeGeometry (entity->Geometry, &minM, &maxM);
tot_ln = 0;
tot_v = 0;
line = entity->Geometry->FirstLinestring;
while (line)
{
/* computes # lines and total # points */
tot_v += line->Points;
tot_ln++;
line = line->Next;
}
if (!tot_ln)
{
strcpy (dummy,
"a LINESTRING is expected, but there is no LINESTRING in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
hasM = 0;
if (shp->EffectiveDims == GAIA_XY_M
|| shp->EffectiveDims == GAIA_XY_Z_M)
hasM = 1;
if (hasM)
this_size = 38 + (2 * tot_ln) + (tot_v * 16); /* size [in 16 bits words !!!] ZM */
else
this_size = 30 + (2 * tot_ln) + (tot_v * 12); /* size [in 16 bits words !!!] Z-only */
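/* the Z-range adds 8 words [two doubles] and each point grows by 4 words [its Z value];
   when M is present a further 8-word M-range plus 4 words per point are appended */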
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting LINESTRING or MULTILINESTRING in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting LINESTRING or MULTILINESTRING in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POLYLINEZ, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POLYLINE Z */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_ln, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # lines in this polyline */
gaiaExport32 (shp->BufShp + 48, tot_v, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
tot_v = 0; /* resets points counter */
ix = 52; /* sets current buffer offset */
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports start point index for each line */
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += line->Points;
ix += 4;
line = line->Next;
}
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports points for each line */
for (iv = 0; iv < line->Points; iv++)
{
/* exports a POINT [x,y] */
if (line->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (line->Coords, iv, &x, &y,
&z);
}
else if (line->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (line->Coords, iv, &x, &y,
&m);
}
else if (line->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (line->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (line->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
line = line->Next;
}
/* exporting the Z-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minZ, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxZ, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports Z-values for each line */
for (iv = 0; iv < line->Points; iv++)
{
/* exports Z-value */
z = 0.0;
if (line->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (line->Coords, iv, &x, &y,
&z);
}
else if (line->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (line->Coords, iv, &x, &y,
&m);
}
else if (line->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (line->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (line->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, z,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
line = line->Next;
}
if (hasM)
{
/* exporting the M-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minM,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxM,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports M-values for each line */
for (iv = 0; iv < line->Points; iv++)
{
/* exports M-value */
m = 0.0;
if (line->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (line->Coords, iv,
&x, &y, &z);
}
else if (line->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (line->Coords, iv,
&x, &y, &m);
}
else if (line->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (line->Coords, iv,
&x, &y, &z, &m);
}
else
{
gaiaGetPoint (line->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, m,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
line = line->Next;
}
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2); /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_POLYLINEM)
{
/* this one is expected to be a LINESTRING / MULTILINESTRING M */
gaiaLinestringPtr line;
gaiaMRangeGeometry (entity->Geometry, &minM, &maxM);
tot_ln = 0;
tot_v = 0;
line = entity->Geometry->FirstLinestring;
while (line)
{
/* computes # lines and total # points */
tot_v += line->Points;
tot_ln++;
line = line->Next;
}
if (!tot_ln)
{
strcpy (dummy,
"a LINESTRING is expected, but there is no LINESTRING in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
this_size = 30 + (2 * tot_ln) + (tot_v * 12); /* size [in 16 bits words !!!] for this SHP entity */
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting LINESTRING or MULTILINESTRING in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting LINESTRING or MULTILINESTRING in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POLYLINEM, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POLYLINE M */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_ln, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # lines in this polyline */
gaiaExport32 (shp->BufShp + 48, tot_v, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
tot_v = 0; /* resets points counter */
ix = 52; /* sets current buffer offset */
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports start point index for each line */
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += line->Points;
ix += 4;
line = line->Next;
}
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports points for each line */
for (iv = 0; iv < line->Points; iv++)
{
/* exports a POINT [x,y] */
if (line->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (line->Coords, iv, &x, &y,
&z);
}
else if (line->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (line->Coords, iv, &x, &y,
&m);
}
else if (line->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (line->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (line->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
line = line->Next;
}
/* exporting the M-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minM, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxM, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
line = entity->Geometry->FirstLinestring;
while (line)
{
/* exports M-values for each line */
for (iv = 0; iv < line->Points; iv++)
{
/* exports M-value */
m = 0.0;
if (line->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (line->Coords, iv, &x, &y,
&z);
}
else if (line->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (line->Coords, iv, &x, &y,
&m);
}
else if (line->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (line->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (line->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, m,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
line = line->Next;
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2); /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_POLYGON)
{
/* this one is expected to be a POLYGON or a MULTIPOLYGON */
gaiaPolygonPtr polyg;
gaiaRingPtr ring;
int ib;
tot_ln = 0;
tot_v = 0;
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* computes # rings and total # points */
gaiaSaneClockwise (polyg);	/* we must ensure that the exterior ring is clockwise and the interior rings are counter-clockwise */
ring = polyg->Exterior; /* this one is the exterior ring */
tot_v += ring->Points;
tot_ln++;
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
tot_v += ring->Points;
tot_ln++;
}
polyg = polyg->Next;
}
if (!tot_ln)
{
strcpy (dummy,
"a POLYGON is expected, but there is no POLYGON in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
this_size = 22 + (2 * tot_ln) + (tot_v * 8); /* size [in 16 bits words !!!] for this SHP entity */
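/* same record layout as the POLYLINE case: here each part is a ring
   [exterior ring first, followed by its interior rings, polygon by polygon] */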
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting POLYGON or MULTIPOLYGON in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting POLYGON or MULTIPOLYGON in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POLYGON, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POLYGON */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_ln, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # rings in this polygon */
gaiaExport32 (shp->BufShp + 48, tot_v, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
tot_v = 0; /* resets points counter */
ix = 52; /* sets current buffer offset */
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports start point index for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += ring->Points;
ix += 4;
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += ring->Points;
ix += 4;
}
polyg = polyg->Next;
}
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports points for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
for (iv = 0; iv < ring->Points; iv++)
{
/* exports a POINT [x,y] - exterior ring */
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv, &x, &y,
&z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv, &x, &y,
&m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
for (iv = 0; iv < ring->Points; iv++)
{
/* exports a POINT [x,y] - interior ring */
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv,
&x, &y, &z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv,
&x, &y, &m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv,
&x, &y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
}
polyg = polyg->Next;
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2);
}
if (shp->Shape == GAIA_SHP_POLYGONZ)
{
/* this one is expected to be a POLYGON or a MULTIPOLYGON Z */
gaiaPolygonPtr polyg;
gaiaRingPtr ring;
int ib;
gaiaZRangeGeometry (entity->Geometry, &minZ, &maxZ);
gaiaMRangeGeometry (entity->Geometry, &minM, &maxM);
tot_ln = 0;
tot_v = 0;
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* computes # rings and total # points */
gaiaSaneClockwise (polyg);	/* we must ensure that the exterior ring is clockwise and the interior rings are counter-clockwise */
ring = polyg->Exterior; /* this one is the exterior ring */
tot_v += ring->Points;
tot_ln++;
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
tot_v += ring->Points;
tot_ln++;
}
polyg = polyg->Next;
}
if (!tot_ln)
{
strcpy (dummy,
"a POLYGON is expected, but there is no POLYGON in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
hasM = 0;
if (shp->EffectiveDims == GAIA_XY_M
|| shp->EffectiveDims == GAIA_XY_Z_M)
hasM = 1;
if (hasM)
this_size = 38 + (2 * tot_ln) + (tot_v * 16); /* size [in 16 bits words !!!] ZM */
else
this_size = 30 + (2 * tot_ln) + (tot_v * 12); /* size [in 16 bits words !!!] Z-only */
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting POLYGON or MULTIPOLYGON in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting POLYGON or MULTIPOLYGON in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POLYGONZ, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POLYGON Z */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_ln, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # rings in this polygon */
gaiaExport32 (shp->BufShp + 48, tot_v, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
tot_v = 0; /* resets points counter */
ix = 52; /* sets current buffer offset */
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports start point index for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += ring->Points;
ix += 4;
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += ring->Points;
ix += 4;
}
polyg = polyg->Next;
}
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports points for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
for (iv = 0; iv < ring->Points; iv++)
{
/* exports a POINT [x,y] - exterior ring */
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv, &x, &y,
&z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv, &x, &y,
&m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
for (iv = 0; iv < ring->Points; iv++)
{
/* exports a POINT [x,y] - interior ring */
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv,
&x, &y, &z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv,
&x, &y, &m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv,
&x, &y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
}
polyg = polyg->Next;
}
/* exporting the Z-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minZ, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxZ, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports Z-values for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
for (iv = 0; iv < ring->Points; iv++)
{
/* exports Z-values - exterior ring */
z = 0.0;
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv, &x, &y,
&z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv, &x, &y,
&m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, z,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
for (iv = 0; iv < ring->Points; iv++)
{
/* exports Z-values - interior ring */
z = 0.0;
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv,
&x, &y, &z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv,
&x, &y, &m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv,
&x, &y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, z,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
}
polyg = polyg->Next;
}
if (hasM)
{
/* exporting the M-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minM,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxM,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports M-values for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
for (iv = 0; iv < ring->Points; iv++)
{
/* exports M-values - exterior ring */
m = 0.0;
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv,
&x, &y, &z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv,
&x, &y, &m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv,
&x, &y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, m,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
for (iv = 0; iv < ring->Points; iv++)
{
/* exports M-values - interior ring */
m = 0.0;
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords,
iv, &x, &y, &z);
}
else if (ring->DimensionModel ==
GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords,
iv, &x, &y, &m);
}
else if (ring->DimensionModel ==
GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords,
iv, &x, &y, &z,
&m);
}
else
{
gaiaGetPoint (ring->Coords,
iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, m,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
}
polyg = polyg->Next;
}
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2);
}
if (shp->Shape == GAIA_SHP_POLYGONM)
{
/* this one is expected to be a POLYGON or a MULTIPOLYGON M */
gaiaPolygonPtr polyg;
gaiaRingPtr ring;
int ib;
gaiaMRangeGeometry (entity->Geometry, &minM, &maxM);
tot_ln = 0;
tot_v = 0;
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* computes # rings and total # points */
gaiaSaneClockwise (polyg);	/* we must ensure that the exterior ring is clockwise and the interior rings are counter-clockwise */
ring = polyg->Exterior; /* this one is the exterior ring */
tot_v += ring->Points;
tot_ln++;
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
tot_v += ring->Points;
tot_ln++;
}
polyg = polyg->Next;
}
if (!tot_ln)
{
strcpy (dummy,
"a POLYGON is expected, but there is no POLYGON in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
this_size = 30 + (2 * tot_ln) + (tot_v * 12); /* size [in 16 bits words !!!] for this SHP entity */
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting POLYGON or MULTIPOLYGON in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting POLYGON or MULTIPOLYGON in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_POLYGONM, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = POLYGON M */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_ln, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # rings in this polygon */
gaiaExport32 (shp->BufShp + 48, tot_v, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
tot_v = 0; /* resets points counter */
ix = 52; /* sets current buffer offset */
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports start point index for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += ring->Points;
ix += 4;
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
gaiaExport32 (shp->BufShp + ix, tot_v,
GAIA_LITTLE_ENDIAN, endian_arch);
tot_v += ring->Points;
ix += 4;
}
polyg = polyg->Next;
}
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports points for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
for (iv = 0; iv < ring->Points; iv++)
{
/* exports a POINT [x,y] - exterior ring */
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv, &x, &y,
&z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv, &x, &y,
&m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
for (iv = 0; iv < ring->Points; iv++)
{
/* exports a POINT [x,y] - interior ring */
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv,
&x, &y, &z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv,
&x, &y, &m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv,
&x, &y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, x,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, y,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
}
polyg = polyg->Next;
}
/* exporting the M-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minM, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxM, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
polyg = entity->Geometry->FirstPolygon;
while (polyg)
{
/* exports M-values for each ring */
ring = polyg->Exterior; /* this one is the exterior ring */
for (iv = 0; iv < ring->Points; iv++)
{
/* exports M-values - exterior ring */
m = 0.0;
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv, &x, &y,
&z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv, &x, &y,
&m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv, &x,
&y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, m,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
}
for (ib = 0; ib < polyg->NumInteriors; ib++)
{
/* these are the interior rings */
ring = polyg->Interiors + ib;
for (iv = 0; iv < ring->Points; iv++)
{
/* exports M-values - interior ring */
m = 0.0;
if (ring->DimensionModel == GAIA_XY_Z)
{
gaiaGetPointXYZ (ring->Coords, iv,
&x, &y, &z);
}
else if (ring->DimensionModel == GAIA_XY_M)
{
gaiaGetPointXYM (ring->Coords, iv,
&x, &y, &m);
}
else if (ring->DimensionModel == GAIA_XY_Z_M)
{
gaiaGetPointXYZM (ring->Coords, iv,
&x, &y, &z, &m);
}
else
{
gaiaGetPoint (ring->Coords, iv, &x, &y);
}
gaiaExport64 (shp->BufShp + ix, m,
GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
}
}
polyg = polyg->Next;
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2);
}
if (shp->Shape == GAIA_SHP_MULTIPOINT)
{
/* this one is expected to be a MULTIPOINT */
gaiaPointPtr pt;
tot_pts = 0;
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* computes # points */
tot_pts++;
pt = pt->Next;
}
if (!tot_pts)
{
strcpy (dummy,
"a MULTIPOINT is expected, but there is no POINT/MULTIPOINT in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
this_size = 20 + (tot_pts * 8); /* size [in 16 bits words !!!] for this SHP entity */
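/* 20 words = 40 bytes of fixed record content [shape type 4 + MBR 32 + NumPoints 4];
   a MULTIPOINT has no parts array, so only 8 words [the x,y doubles] are added per point */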
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting MULTIPOINT in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting MULTIPOINT in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_MULTIPOINT, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = MULTIPOINT */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_pts, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
ix = 48; /* sets current buffer offset */
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* exports each point */
gaiaExport64 (shp->BufShp + ix, pt->X,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, pt->Y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
pt = pt->Next;
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2); /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_MULTIPOINTZ)
{
/* this one is expected to be a MULTIPOINT Z */
gaiaPointPtr pt;
gaiaZRangeGeometry (entity->Geometry, &minZ, &maxZ);
gaiaMRangeGeometry (entity->Geometry, &minM, &maxM);
tot_pts = 0;
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* computes # points */
tot_pts++;
pt = pt->Next;
}
if (!tot_pts)
{
strcpy (dummy,
"a MULTIPOINT is expected, but there is no POINT/MULTIPOINT in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
hasM = 0;
if (shp->EffectiveDims == GAIA_XY_M
|| shp->EffectiveDims == GAIA_XY_Z_M)
hasM = 1;
if (hasM)
this_size = 36 + (tot_pts * 16); /* size [in 16 bits words !!!] ZM */
else
this_size = 28 + (tot_pts * 12); /* size [in 16 bits words !!!] Z-only */
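/* as in the Z polyline case: the Z-range adds 8 words and each point grows by 4 words;
   when M is present a further 8-word M-range plus 4 words per point follow */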
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting MULTIPOINT in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting MULTIPOINT in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_MULTIPOINTZ, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = MULTIPOINT Z */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_pts, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
ix = 48; /* sets current buffer offset */
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* exports each point */
gaiaExport64 (shp->BufShp + ix, pt->X,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, pt->Y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
pt = pt->Next;
}
/* exporting the Z-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minZ, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxZ, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* exports Z-values */
gaiaExport64 (shp->BufShp + ix, pt->Z,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
pt = pt->Next;
}
if (hasM)
{
/* exporting the M-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minM,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxM,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* exports M-values */
gaiaExport64 (shp->BufShp + ix, pt->M,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
pt = pt->Next;
}
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2); /* updating current SHP file position [in 16 bits words !!!] */
}
if (shp->Shape == GAIA_SHP_MULTIPOINTM)
{
/* this one is expected to be a MULTIPOINT M */
gaiaPointPtr pt;
gaiaMRangeGeometry (entity->Geometry, &minM, &maxM);
tot_pts = 0;
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* computes # points */
tot_pts++;
pt = pt->Next;
}
if (!tot_pts)
{
strcpy (dummy,
"a MULTIPOINT is expected, but there is no POINT/MULTIPOINT in geometry");
if (shp->LastError)
free (shp->LastError);
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
this_size = 28 + (tot_pts * 12); /* size [in 16 bits words !!!] for this SHP entity */
if ((this_size * 2) + 1024 > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger one */
free (shp->BufShp);
shp->ShpBfsz = (this_size * 2) + 1024;
shp->BufShp = malloc (shp->ShpBfsz);
}
/* inserting MULTIPOINT in SHX file */
gaiaExport32 (shp->BufShp, shp->ShpSize, GAIA_BIG_ENDIAN, endian_arch); /* exports current SHP file position */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch);	/* exports entity size [in 16 bits words !!!] */
fwrite (shp->BufShp, 1, 8, shp->flShx);
(shp->ShxSize) += 4;
/* inserting MULTIPOINT in SHP file */
gaiaExport32 (shp->BufShp, shp->DbfRecno + 1, GAIA_BIG_ENDIAN, endian_arch); /* exports entity ID */
gaiaExport32 (shp->BufShp + 4, this_size, GAIA_BIG_ENDIAN, endian_arch); /* exports entity size [in 16 bits words !!!] */
gaiaExport32 (shp->BufShp + 8, GAIA_SHP_MULTIPOINTM, GAIA_LITTLE_ENDIAN, endian_arch); /* exports geometry type = MULTIPOINT M */
gaiaExport64 (shp->BufShp + 12, entity->Geometry->MinX, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the MBR for this geometry */
gaiaExport64 (shp->BufShp + 20, entity->Geometry->MinY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 28, entity->Geometry->MaxX,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (shp->BufShp + 36, entity->Geometry->MaxY,
GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport32 (shp->BufShp + 44, tot_pts, GAIA_LITTLE_ENDIAN, endian_arch); /* exports total # points */
ix = 48; /* sets current buffer offset */
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* exports each point */
gaiaExport64 (shp->BufShp + ix, pt->X,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, pt->Y,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
pt = pt->Next;
}
/* exporting the M-range [min/max] */
gaiaExport64 (shp->BufShp + ix, minM, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
gaiaExport64 (shp->BufShp + ix, maxM, GAIA_LITTLE_ENDIAN,
endian_arch);
ix += 8;
pt = entity->Geometry->FirstPoint;
while (pt)
{
/* exports M-values */
gaiaExport64 (shp->BufShp + ix, pt->M,
GAIA_LITTLE_ENDIAN, endian_arch);
ix += 8;
pt = pt->Next;
}
fwrite (shp->BufShp, 1, ix, shp->flShp);
(shp->ShpSize) += (ix / 2); /* updating current SHP file position [in 16 bits words !!!] */
}
}
/* inserting entity in DBF file */
fwrite (shp->BufDbf, 1, shp->DbfReclen, shp->flDbf);
(shp->DbfRecno)++;
return 1;
conversion_error:
if (shp->LastError)
free (shp->LastError);
sprintf (dummy, "Invalid character sequence");
len = strlen (dummy);
shp->LastError = malloc (len + 1);
strcpy (shp->LastError, dummy);
return 0;
}
GAIAGEO_DECLARE void
gaiaFlushShpHeaders (gaiaShapefilePtr shp)
{
/* updates the various file headers */
FILE *fl_shp = shp->flShp;
FILE *fl_shx = shp->flShx;
FILE *fl_dbf = shp->flDbf;
int shp_size = shp->ShpSize;
int shx_size = shp->ShxSize;
int dbf_size = shp->DbfSize;
int dbf_reclen = shp->DbfReclen;
int dbf_recno = shp->DbfRecno;
int endian_arch = shp->endian_arch;
double minx = shp->MinX;
double miny = shp->MinY;
double maxx = shp->MaxX;
double maxy = shp->MaxY;
unsigned char *buf_shp = shp->BufShp;
/* writing the SHP file header */
fseek (fl_shp, 0, SEEK_SET); /* repositioning at SHP file start */
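/* SHP/SHX header layout [100 bytes]: 0-3 file code 9994 (big endian), 4-23 unused,
   24-27 file length in 16-bit words (big endian), 28-31 version 1000, 32-35 shape type,
   36-67 full extent MBR, 68-99 Z/M ranges (everything from offset 28 onwards little endian) */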
gaiaExport32 (buf_shp, 9994, GAIA_BIG_ENDIAN, endian_arch); /* SHP magic number */
gaiaExport32 (buf_shp + 4, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 8, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 12, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 16, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 20, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 24, shp_size, GAIA_BIG_ENDIAN, endian_arch); /* SHP file size - measured in 16 bits words !!! */
gaiaExport32 (buf_shp + 28, 1000, GAIA_LITTLE_ENDIAN, endian_arch); /* version */
gaiaExport32 (buf_shp + 32, shp->Shape, GAIA_LITTLE_ENDIAN, endian_arch); /* ESRI shape */
gaiaExport64 (buf_shp + 36, minx, GAIA_LITTLE_ENDIAN, endian_arch); /* the MBR/BBOX for the whole shapefile */
gaiaExport64 (buf_shp + 44, miny, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 52, maxx, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 60, maxy, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 68, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 76, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 84, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 92, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
fwrite (buf_shp, 1, 100, fl_shp);
/* writing the SHX file header */
fseek (fl_shx, 0, SEEK_SET); /* repositioning at SHX file start */
gaiaExport32 (buf_shp, 9994, GAIA_BIG_ENDIAN, endian_arch); /* SHP magic number */
gaiaExport32 (buf_shp + 4, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 8, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 12, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 16, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 20, 0, GAIA_BIG_ENDIAN, endian_arch);
gaiaExport32 (buf_shp + 24, shx_size, GAIA_BIG_ENDIAN, endian_arch);	/* SHX file size - measured in 16 bits words !!! */
gaiaExport32 (buf_shp + 28, 1000, GAIA_LITTLE_ENDIAN, endian_arch); /* version */
gaiaExport32 (buf_shp + 32, shp->Shape, GAIA_LITTLE_ENDIAN, endian_arch); /* ESRI shape */
gaiaExport64 (buf_shp + 36, minx, GAIA_LITTLE_ENDIAN, endian_arch); /* the MBR for the whole shapefile */
gaiaExport64 (buf_shp + 44, miny, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 52, maxx, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 60, maxy, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 68, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 76, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 84, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
gaiaExport64 (buf_shp + 92, 0.0, GAIA_LITTLE_ENDIAN, endian_arch);
fwrite (buf_shp, 1, 100, fl_shx);
/* writing the DBF file header */
*buf_shp = 0x1a;	/* DBF - this is the EOF marker */
fwrite (buf_shp, 1, 1, fl_dbf);
fseek (fl_dbf, 0, SEEK_SET); /* repositioning at DBF file start */
memset (buf_shp, '\0', 32);
*buf_shp = 0x03; /* DBF magic number */
*(buf_shp + 1) = 1;	/* this is supposed to be the last update date [Year, Month, Day], but we simply ignore it */
*(buf_shp + 2) = 1;
*(buf_shp + 3) = 1;
gaiaExport32 (buf_shp + 4, dbf_recno, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # records in this DBF */
gaiaExport16 (buf_shp + 8, (short) dbf_size, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the file header size */
gaiaExport16 (buf_shp + 10, (short) dbf_reclen, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the record length */
fwrite (buf_shp, 1, 32, fl_dbf);
}
GAIAGEO_DECLARE void
gaiaShpAnalyze (gaiaShapefilePtr shp)
{
/* analyzing the SHP content in order to detect whether it actually contains LINESTRINGs or MULTILINESTRINGs;
   the same check detects whether it contains POLYGONs or MULTIPOLYGONs, and whether Z geometries also carry M values
*/
unsigned char buf[512];
int rd;
int skpos;
int offset;
int off_shp;
int sz;
int shape;
int points;
int n;
int n1;
int base;
int start;
int end;
int iv;
int ind;
double x;
double y;
int polygons;
int ZM_size;
int multi = 0;
int hasM = 0;
int current_row = 0;
gaiaRingPtr ring = NULL;
while (1)
{
/* positioning and reading the SHX file */
offset = 100 + (current_row * 8); /* 100 bytes for the header + current row displacement; each SHX row = 8 bytes */
skpos = fseek (shp->flShx, offset, SEEK_SET);
if (skpos != 0)
goto exit;
rd = fread (buf, sizeof (unsigned char), 8, shp->flShx);
if (rd != 8)
goto exit;
off_shp = gaiaImport32 (buf, GAIA_BIG_ENDIAN, shp->endian_arch);
/* positioning and reading corresponding SHP entity - geometry */
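/* the SHX stores record offsets in 16-bit words, so the value is doubled
   to obtain the byte offset into the SHP file */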
offset = off_shp * 2;
skpos = fseek (shp->flShp, offset, SEEK_SET);
if (skpos != 0)
goto exit;
rd = fread (buf, sizeof (unsigned char), 12, shp->flShp);
if (rd != 12)
goto exit;
sz = gaiaImport32 (buf + 4, GAIA_BIG_ENDIAN, shp->endian_arch);
shape = gaiaImport32 (buf + 8, GAIA_LITTLE_ENDIAN, shp->endian_arch);
if ((sz * 2) > shp->ShpBfsz)
{
/* current buffer is too small; we need to allocate a bigger buffer */
free (shp->BufShp);
shp->ShpBfsz = sz * 2;
shp->BufShp = malloc (sizeof (unsigned char) * shp->ShpBfsz);
}
if (shape == GAIA_SHP_POLYLINE || shape == GAIA_SHP_POLYLINEZ
|| shape == GAIA_SHP_POLYLINEM)
{
/* shape polyline */
rd = fread (shp->BufShp, sizeof (unsigned char), 32,
shp->flShp);
if (rd != 32)
goto exit;
rd = fread (shp->BufShp, sizeof (unsigned char),
(sz * 2) - 36, shp->flShp);
if (rd != (sz * 2) - 36)
goto exit;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
if (n > 1)
multi++;
if (shape == GAIA_SHP_POLYLINEZ)
{
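/* heuristic: a PolylineZ record also carrying M values occupies exactly this many
   16-bit words; if the declared record size matches, M values are present */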
ZM_size = 38 + (2 * n) + (n1 * 16); /* size [in 16 bits words !!!] ZM */
if (sz == ZM_size)
hasM = 1;
}
}
if (shape == GAIA_SHP_POLYGON || shape == GAIA_SHP_POLYGONZ
|| shape == GAIA_SHP_POLYGONM)
{
/* shape polygon */
struct shp_ring_item *pExt;
struct shp_ring_collection ringsColl;
/* initializing the RING collection */
ringsColl.First = NULL;
ringsColl.Last = NULL;
rd = fread (shp->BufShp, sizeof (unsigned char), 32,
shp->flShp);
if (rd != 32)
goto exit;
rd = fread (shp->BufShp, sizeof (unsigned char),
(sz * 2) - 36, shp->flShp);
if (rd != (sz * 2) - 36)
goto exit;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
n1 = gaiaImport32 (shp->BufShp + 4, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
base = 8 + (n * 4);
start = 0;
for (ind = 0; ind < n; ind++)
{
if (ind < (n - 1))
end =
gaiaImport32 (shp->BufShp + 8 +
((ind + 1) * 4),
GAIA_LITTLE_ENDIAN,
shp->endian_arch);
else
end = n1;
points = end - start;
ring = gaiaAllocRing (points);
points = 0;
for (iv = start; iv < end; iv++)
{
x = gaiaImport64 (shp->BufShp + base +
(iv * 16), GAIA_LITTLE_ENDIAN,
shp->endian_arch);
y = gaiaImport64 (shp->BufShp + base +
(iv * 16) + 8,
GAIA_LITTLE_ENDIAN,
shp->endian_arch);
gaiaSetPoint (ring->Coords, points, x, y);
start++;
points++;
}
shp_add_ring (&ringsColl, ring);
ring = NULL;
}
shp_arrange_rings (&ringsColl);
pExt = ringsColl.First;
polygons = 0;
while (pExt != NULL)
{
if (pExt->IsExterior)
polygons++;
pExt = pExt->Next;
}
shp_free_rings (&ringsColl);
if (polygons > 1)
multi++;
if (shape == GAIA_SHP_POLYGONZ)
{
ZM_size = 38 + (2 * n) + (n1 * 16); /* size [in 16 bits words !!!] ZM */
if (sz == ZM_size)
hasM = 1;
}
}
if (shape == GAIA_SHP_MULTIPOINTZ)
{
/* shape multipoint Z */
rd = fread (shp->BufShp, sizeof (unsigned char), 32,
shp->flShp);
if (rd != 32)
goto exit;
rd = fread (shp->BufShp, sizeof (unsigned char),
(sz * 2) - 36, shp->flShp);
if (rd != (sz * 2) - 36)
goto exit;
n = gaiaImport32 (shp->BufShp, GAIA_LITTLE_ENDIAN,
shp->endian_arch);
ZM_size = 36 + (n * 16);	/* size [in 16 bits words !!!] ZM: a MULTIPOINT has no parts array, so the fixed part is 36 words, not 38 */
if (sz == ZM_size)
hasM = 1;
}
current_row++;
}
exit:
if (ring)
gaiaFreeRing (ring);
if (shp->LastError)
free (shp->LastError);
shp->LastError = NULL;
/* setting the EffectiveType, as determined by this analysis */
if (shp->Shape == GAIA_SHP_POLYLINE || shp->Shape == GAIA_SHP_POLYLINEZ
|| shp->Shape == GAIA_SHP_POLYLINEM)
{
/* SHAPE polyline */
if (multi)
shp->EffectiveType = GAIA_MULTILINESTRING;
else
shp->EffectiveType = GAIA_LINESTRING;
}
if (shp->Shape == GAIA_SHP_POLYGON || shp->Shape == GAIA_SHP_POLYGONZ
|| shp->Shape == GAIA_SHP_POLYGONM)
{
/* SHAPE polygon */
if (multi)
shp->EffectiveType = GAIA_MULTIPOLYGON;
else
shp->EffectiveType = GAIA_POLYGON;
}
if (shp->Shape == GAIA_SHP_POLYLINEZ || shp->Shape == GAIA_SHP_POLYGONZ
|| shp->Shape == GAIA_SHP_MULTIPOINTZ)
{
if (hasM)
shp->EffectiveDims = GAIA_XY_Z_M;
else
shp->EffectiveDims = GAIA_XY_Z;
}
}
GAIAGEO_DECLARE gaiaDbfPtr
gaiaAllocDbf ()
{
/* allocates and initializes the DBF object */
gaiaDbfPtr dbf = malloc (sizeof (gaiaDbf));
dbf->endian_arch = 1;
dbf->Path = NULL;
dbf->flDbf = NULL;
dbf->Dbf = NULL;
dbf->BufDbf = NULL;
dbf->DbfHdsz = 0;
dbf->DbfReclen = 0;
dbf->DbfSize = 0;
dbf->DbfRecno = 0;
dbf->Valid = 0;
dbf->IconvObj = NULL;
dbf->LastError = NULL;
return dbf;
}
GAIAGEO_DECLARE void
gaiaFreeDbf (gaiaDbfPtr dbf)
{
/* frees all memory allocations related to the DBF object */
if (dbf->Path)
free (dbf->Path);
if (dbf->flDbf)
fclose (dbf->flDbf);
if (dbf->Dbf)
gaiaFreeDbfList (dbf->Dbf);
if (dbf->BufDbf)
free (dbf->BufDbf);
if (dbf->IconvObj)
iconv_close ((iconv_t) dbf->IconvObj);
if (dbf->LastError)
free (dbf->LastError);
free (dbf);
}
GAIAGEO_DECLARE void
gaiaOpenDbfRead (gaiaDbfPtr dbf, const char *path, const char *charFrom,
const char *charTo)
{
/* trying to open the DBF and performing initial checks */
FILE *fl_dbf = NULL;
int rd;
unsigned char bf[1024];
int dbf_size;
int dbf_reclen = 0;
int off_dbf;
int ind;
char field_name[2048];
char *sys_err;
char errMsg[1024];
iconv_t iconv_ret;
char utf8buf[2048];
#if !defined(__MINGW32__) && defined(_WIN32)
const char *pBuf;
#else /* not WIN32 */
char *pBuf;
#endif
size_t len;
size_t utf8len;
char *pUtf8buf;
int endian_arch = gaiaEndianArch ();
gaiaDbfListPtr dbf_list = NULL;
if (charFrom && charTo)
{
iconv_ret = iconv_open (charTo, charFrom);
if (iconv_ret == (iconv_t) (-1))
{
sprintf (errMsg,
"conversion from '%s' to '%s' not available\n",
charFrom, charTo);
goto unsupported_conversion;
}
dbf->IconvObj = iconv_ret;
}
else
{
sprintf (errMsg, "a NULL charset-name was passed\n");
goto unsupported_conversion;
}
if (dbf->flDbf != NULL)
{
sprintf (errMsg, "attempting to reopen an already opened DBF\n");
goto unsupported_conversion;
}
fl_dbf = fopen (path, "rb");
if (!fl_dbf)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for reading: %s", path,
sys_err);
goto no_file;
}
/* reading DBF file header */
rd = fread (bf, sizeof (unsigned char), 32, fl_dbf);
if (rd != 32)
goto error;
switch (*bf)
{
/* checks the DBF magic number */
case 0x03:
case 0x83:
break;
case 0x02:
case 0xF8:
sprintf (errMsg, "'%s'\ninvalid magic number %02x [FoxBASE format]",
path, *bf);
goto dbf_bad_magic;
case 0xF5:
sprintf (errMsg,
"'%s'\ninvalid magic number %02x [FoxPro 2.x (or earlier) format]",
path, *bf);
goto dbf_bad_magic;
case 0x30:
case 0x31:
case 0x32:
sprintf (errMsg,
"'%s'\ninvalid magic number %02x [Visual FoxPro format]",
path, *bf);
goto dbf_bad_magic;
case 0x43:
case 0x63:
case 0xBB:
case 0xCB:
sprintf (errMsg, "'%s'\ninvalid magic number %02x [dBASE IV format]",
path, *bf);
goto dbf_bad_magic;
default:
sprintf (errMsg, "'%s'\ninvalid magic number %02x [unknown format]",
path, *bf);
goto dbf_bad_magic;
};
dbf_size = gaiaImport16 (bf + 8, GAIA_LITTLE_ENDIAN, endian_arch);
dbf_reclen = gaiaImport16 (bf + 10, GAIA_LITTLE_ENDIAN, endian_arch);
dbf_size--;
off_dbf = 0;
dbf_list = gaiaAllocDbfList ();
for (ind = 32; ind < dbf_size; ind += 32)
{
/* fetches DBF fields definitions */
rd = fread (bf, sizeof (unsigned char), 32, fl_dbf);
if (rd != 32)
goto error;
if (*(bf + 11) == 'M')
{
/* skipping any MEMO field */
memcpy (field_name, bf, 11);
field_name[11] = '\0';
off_dbf += *(bf + 16);
spatialite_e
("WARNING: column \"%s\" is of the MEMO type and will be ignored\n",
field_name);
continue;
}
memcpy (field_name, bf, 11);
field_name[11] = '\0';
len = strlen ((char *) field_name);
utf8len = 2048;
pBuf = (char *) field_name;
pUtf8buf = utf8buf;
if (iconv
((iconv_t) (dbf->IconvObj), &pBuf, &len, &pUtf8buf,
&utf8len) == (size_t) (-1))
goto conversion_error;
memcpy (field_name, utf8buf, 2048 - utf8len);
field_name[2048 - utf8len] = '\0';
gaiaAddDbfField (dbf_list, field_name, *(bf + 11), off_dbf,
*(bf + 16), *(bf + 17));
off_dbf += *(bf + 16);
}
if (!gaiaIsValidDbfList (dbf_list))
{
/* invalid DBF */
goto illegal_dbf;
}
len = strlen (path);
dbf->Path = malloc (len + 1);
strcpy (dbf->Path, path);
dbf->flDbf = fl_dbf;
dbf->Dbf = dbf_list;
/* allocating DBF buffer */
dbf->BufDbf = malloc (sizeof (unsigned char) * dbf_reclen);
dbf->DbfHdsz = dbf_size + 1;
dbf->DbfReclen = dbf_reclen;
dbf->Valid = 1;
dbf->endian_arch = endian_arch;
return;
unsupported_conversion:
/* illegal charset */
if (dbf->LastError)
free (dbf->LastError);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
return;
no_file:
/* the DBF file can't be accessed */
if (dbf->LastError)
free (dbf->LastError);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
if (fl_dbf)
fclose (fl_dbf);
return;
error:
/* the DBF is invalid or corrupted */
if (dbf->LastError)
free (dbf->LastError);
sprintf (errMsg, "'%s' is corrupted / has invalid format", path);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
fclose (fl_dbf);
return;
dbf_bad_magic:
/* the DBF has an invalid magic number */
if (dbf->LastError)
free (dbf->LastError);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
fclose (fl_dbf);
return;
illegal_dbf:
/* the DBF-file contains unsupported data types */
if (dbf->LastError)
free (dbf->LastError);
sprintf (errMsg, "'%s' contains unsupported data types", path);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
if (fl_dbf)
fclose (fl_dbf);
return;
conversion_error:
/* libiconv error */
if (dbf->LastError)
free (dbf->LastError);
sprintf (errMsg, "'%s' field name: invalid character sequence", path);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
gaiaFreeDbfList (dbf_list);
if (fl_dbf)
fclose (fl_dbf);
return;
}
GAIAGEO_DECLARE void
gaiaOpenDbfWrite (gaiaDbfPtr dbf, const char *path, const char *charFrom,
const char *charTo)
{
/* trying to create the DBF file */
FILE *fl_dbf = NULL;
unsigned char bf[1024];
unsigned char *dbf_buf = NULL;
gaiaDbfFieldPtr fld;
char *sys_err;
char errMsg[1024];
short dbf_reclen = 0;
unsigned short dbf_size = 0;
iconv_t iconv_ret;
char buf[2048];
char utf8buf[2048];
#if !defined(__MINGW32__) && defined(_WIN32)
const char *pBuf;
#else /* not WIN32 */
char *pBuf;
#endif
size_t len;
size_t utf8len;
char *pUtf8buf;
int defaultId = 1;
struct auxdbf_list *auxdbf = NULL;
if (charFrom && charTo)
{
iconv_ret = iconv_open (charTo, charFrom);
if (iconv_ret == (iconv_t) (-1))
{
sprintf (errMsg, "conversion from '%s' to '%s' not available\n",
charFrom, charTo);
goto unsupported_conversion;
}
dbf->IconvObj = iconv_ret;
}
else
{
sprintf (errMsg, "a NULL charset-name was passed\n");
goto unsupported_conversion;
}
if (dbf->flDbf != NULL)
{
sprintf (errMsg, "attempting to reopen an already opened DBF file\n");
goto unsupported_conversion;
}
/* trying to open the DBF file */
fl_dbf = fopen (path, "wb");
if (!fl_dbf)
{
sys_err = strerror (errno);
sprintf (errMsg, "unable to open '%s' for writing: %s", path,
sys_err);
goto no_file;
}
/* allocating DBF buffer */
dbf_reclen = 1;	/* an extra byte is needed because in DBF rows the first byte is a deletion marker */
fld = dbf->Dbf->First;
while (fld)
{
/* computing the DBF record length */
dbf_reclen += fld->Length;
fld = fld->Next;
}
dbf_buf = malloc (dbf_reclen);
/* writing the DBF file header */
memset (bf, '\0', 32);
fwrite (bf, 1, 32, fl_dbf);
dbf_size = 32; /* note: DBF counts sizes in bytes */
auxdbf = alloc_auxdbf (dbf->Dbf);
fld = dbf->Dbf->First;
while (fld)
{
/* exporting DBF Fields specifications */
memset (bf, 0, 32);
if (strlen (fld->Name) > 10)
{
/* long name: attempting to safely truncate */
truncate_long_name (auxdbf, fld);
}
strcpy (buf, fld->Name);
len = strlen (buf);
utf8len = 2048;
pBuf = buf;
pUtf8buf = utf8buf;
if (iconv
((iconv_t) (dbf->IconvObj), &pBuf, &len, &pUtf8buf,
&utf8len) == (size_t) (-1))
sprintf (buf, "FLD#%d", defaultId++);
else
{
memcpy (buf, utf8buf, 2048 - utf8len);
buf[2048 - utf8len] = '\0';
if (strlen (buf) > 10)
sprintf (buf, "FLD#%d", defaultId++);
}
memcpy (bf, buf, strlen (buf));
*(bf + 11) = fld->Type;
*(bf + 16) = fld->Length;
*(bf + 17) = fld->Decimals;
fwrite (bf, 1, 32, fl_dbf);
dbf_size += 32;
fld = fld->Next;
}
free_auxdbf (auxdbf);
fwrite ("\r", 1, 1, fl_dbf);	/* this special DBF delimiter closes the file header */
dbf_size++;
dbf->Valid = 1;
dbf->flDbf = fl_dbf;
dbf->BufDbf = dbf_buf;
dbf->DbfHdsz = dbf_size + 1;
dbf->DbfReclen = dbf_reclen;
dbf->DbfSize = dbf_size;
dbf->DbfRecno = 0;
return;
unsupported_conversion:
/* illegal charset */
if (dbf->LastError)
free (dbf->LastError);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
return;
no_file:
/* the DBF file can't be created/opened */
if (dbf->LastError)
free (dbf->LastError);
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
if (dbf_buf)
free (dbf_buf);
if (fl_dbf)
fclose (fl_dbf);
return;
}
GAIAGEO_DECLARE int
gaiaWriteDbfEntity (gaiaDbfPtr dbf, gaiaDbfListPtr entity)
{
/* trying to write an entity into some DBF file */
char dummy[128];
char fmt[16];
gaiaDbfFieldPtr fld;
#if !defined(__MINGW32__) && defined(_WIN32)
const char *pBuf;
#else /* not WIN32 */
char *pBuf;
#endif
size_t len;
size_t utf8len;
char *pUtf8buf;
char *dynbuf;
char utf8buf[2048];
/* writing the DBF record */
memset (dbf->BufDbf, '\0', dbf->DbfReclen);
*(dbf->BufDbf) = ' ';	/* in DBF the first byte of each row marks the record as valid or deleted */
fld = entity->First;
while (fld)
{
/* transferring field values */
switch (fld->Type)
{
case 'L':
if (!(fld->Value))
*(dbf->BufDbf + fld->Offset) = '?';
else if (fld->Value->Type != GAIA_INT_VALUE)
*(dbf->BufDbf + fld->Offset + 1) = '?';
else
{
if (fld->Value->IntValue == 0)
*(dbf->BufDbf + fld->Offset + 1) = 'N';
else
*(dbf->BufDbf + fld->Offset + 1) = 'Y';
}
break;
case 'D':
memset (dbf->BufDbf + fld->Offset + 1, '0', 8);
if (fld->Value)
{
if (fld->Value->Type == GAIA_TEXT_VALUE)
{
if (strlen (fld->Value->TxtValue) == 8)
memcpy (dbf->BufDbf + fld->Offset + 1,
fld->Value->TxtValue, 8);
}
}
break;
case 'C':
memset (dbf->BufDbf + fld->Offset + 1, ' ', fld->Length);
if (fld->Value)
{
if (fld->Value->Type == GAIA_TEXT_VALUE)
{
len = strlen (fld->Value->TxtValue);
dynbuf = malloc (len + 1);
strcpy (dynbuf, fld->Value->TxtValue);
if (len > 512)
{
dynbuf[512] = '\0';
len = strlen (dynbuf);
}
utf8len = 2048;
pBuf = dynbuf;
pUtf8buf = utf8buf;
if (iconv
((iconv_t) (dbf->IconvObj), &pBuf, &len,
&pUtf8buf, &utf8len) == (size_t) (-1))
{
free (dynbuf);
goto conversion_error;
}
memcpy (dynbuf, utf8buf, 2048 - utf8len);
dynbuf[2048 - utf8len] = '\0';
if (strlen (dynbuf) < fld->Length)
memcpy (dbf->BufDbf + fld->Offset + 1, dynbuf,
strlen (dynbuf));
else
memcpy (dbf->BufDbf + fld->Offset + 1, dynbuf,
fld->Length);
free (dynbuf);
}
}
break;
case 'N':
memset (dbf->BufDbf + fld->Offset + 1, '\0', fld->Length);
if (fld->Value)
{
if (fld->Value->Type == GAIA_INT_VALUE)
{
sprintf (dummy, FRMT64, fld->Value->IntValue);
if (strlen (dummy) <= fld->Length)
memcpy (dbf->BufDbf + fld->Offset + 1,
dummy, strlen (dummy));
}
if (fld->Value->Type == GAIA_DOUBLE_VALUE)
{
sprintf (fmt, "%%1.%df", fld->Decimals);
sprintf (dummy, fmt, fld->Value->DblValue);
if (strlen (dummy) <= fld->Length)
memcpy (dbf->BufDbf + fld->Offset + 1,
dummy, strlen (dummy));
}
}
break;
};
fld = fld->Next;
}
/* inserting entity in DBF file */
fwrite (dbf->BufDbf, 1, dbf->DbfReclen, dbf->flDbf);
(dbf->DbfRecno)++;
return 1;
conversion_error:
if (dbf->LastError)
free (dbf->LastError);
sprintf (dummy, "Invalid character sequence");
len = strlen (dummy);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, dummy);
return 0;
}
GAIAGEO_DECLARE void
gaiaFlushDbfHeader (gaiaDbfPtr dbf)
{
/* updates the DBF file header */
FILE *fl_dbf = dbf->flDbf;
int dbf_size = dbf->DbfSize;
int dbf_reclen = dbf->DbfReclen;
int dbf_recno = dbf->DbfRecno;
int endian_arch = dbf->endian_arch;
unsigned char bf[64];
/* writing the DBF file header */
    *bf = 0x1a;		/* DBF - this is the EOF marker */
fwrite (bf, 1, 1, fl_dbf);
fseek (fl_dbf, 0, SEEK_SET); /* repositioning at DBF file start */
memset (bf, '\0', 32);
*bf = 0x03; /* DBF magic number */
    *(bf + 1) = 1;		/* this is supposed to be the last update date [Year, Month, Day], but we simply ignore it */
*(bf + 2) = 1;
*(bf + 3) = 1;
gaiaExport32 (bf + 4, dbf_recno, GAIA_LITTLE_ENDIAN, endian_arch); /* exports # records in this DBF */
gaiaExport16 (bf + 8, (short) dbf_size, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the file header size */
gaiaExport16 (bf + 10, (short) dbf_reclen, GAIA_LITTLE_ENDIAN, endian_arch); /* exports the record length */
fwrite (bf, 1, 32, fl_dbf);
}
GAIAGEO_DECLARE int
gaiaReadDbfEntity (gaiaDbfPtr dbf, int current_row, int *deleted)
{
return gaiaReadDbfEntity_ex (dbf, current_row, deleted, 0);
}
GAIAGEO_DECLARE int
gaiaReadDbfEntity_ex (gaiaDbfPtr dbf, int current_row, int *deleted,
int text_dates)
{
/* trying to read an entity from DBF */
int rd;
int skpos;
int offset;
int len;
char errMsg[1024];
gaiaDbfFieldPtr pFld;
/* positioning and reading the DBF file */
offset = dbf->DbfHdsz + (current_row * dbf->DbfReclen);
skpos = fseek (dbf->flDbf, offset, SEEK_SET);
if (skpos != 0)
goto eof;
rd = fread (dbf->BufDbf, sizeof (unsigned char), dbf->DbfReclen,
dbf->flDbf);
if (rd != dbf->DbfReclen)
goto eof;
/* setting up the current DBF ENTITY */
gaiaResetDbfEntity (dbf->Dbf);
dbf->Dbf->RowId = current_row;
if (*(dbf->BufDbf) == '*')
{
/* deleted row */
*deleted = 1;
if (dbf->LastError)
free (dbf->LastError);
dbf->LastError = NULL;
return 1;
}
/* fetching the DBF values */
pFld = dbf->Dbf->First;
while (pFld)
{
if (!parseDbfField (dbf->BufDbf, dbf->IconvObj, pFld, text_dates))
goto conversion_error;
pFld = pFld->Next;
}
if (dbf->LastError)
free (dbf->LastError);
dbf->LastError = NULL;
*deleted = 0;
return 1;
eof:
if (dbf->LastError)
free (dbf->LastError);
dbf->LastError = NULL;
return 0;
conversion_error:
if (dbf->LastError)
free (dbf->LastError);
sprintf (errMsg, "Invalid character sequence");
len = strlen (errMsg);
dbf->LastError = malloc (len + 1);
strcpy (dbf->LastError, errMsg);
return 0;
}
#endif /* ICONV enabled/disabled */
|
using Api.Models;
using Microsoft.EntityFrameworkCore;
namespace Api.Data
{
public class TwitterContext : DbContext
{
public TwitterContext(DbContextOptions<TwitterContext> options) : base(options)
{
}
public DbSet<Tweet>? Tweets { get; set; }
public DbSet<User>? Users { get; set; }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<Tweet>().ToTable("Tweet");
modelBuilder.Entity<User>().ToTable("User");
//modelBuilder.Conventions.Remove<PluralizingTableNameConvention>();
}
}
}
|
(defproject com.xn--lgc/pure-conditioning "0.2.0-SNAPSHOT"
:description "A simple, fast, purely functional condition / restart system for Clojure"
:url "https://www.xn--lgc.com/pure-conditioning"
:license {:name "MIT"
:url "https://opensource.org/licenses/MIT"}
:dependencies [[org.clojure/clojure "1.10.2" :scope "provided"]
[potemkin/potemkin "0.4.5"]]
:global-vars {*warn-on-reflection* true}
:plugins [[lein-tools-deps "0.4.3"]
[lein-marginalia "0.9.1"]]
:middleware [lein-tools-deps.plugin/resolve-dependencies-with-deps-edn]
;; Here we show how top level configurations can be merged with
;; configurations in profiles.
;;
;; The default project will include :deps along with :extra-deps
;; defined with the :async alias.
:lein-tools-deps/config {:config-files [:install :user :project]
:resolve-aliases []})
|
# frozen_string_literal: true
RSpec.describe Clavius::Configuration do
subject(:configuration) {
described_class.new do |c|
c.weekdays = weekdays
c.included = included
c.excluded = excluded
end
}
let(:weekdays) { %i[mon wed fri] }
let(:included) { [Date.new(2015, 1, 1), Date.new(2015, 1, 8)] }
let(:excluded) { [Date.new(2015, 1, 2), Date.new(2015, 1, 9)] }
context 'when initialized without a block' do
it 'does not blow up' do
expect { described_class.new }.not_to raise_error
end
end
describe '#weekdays' do
it 'returns the associated wdays' do
expect(configuration.weekdays).to eq Set[1, 3, 5]
end
context 'when nonsensical values are provided' do
let(:weekdays) { %i[hi there tue thu] }
it 'filters them out' do
expect(configuration.weekdays).to eq Set[2, 4]
end
end
context 'when duplicate values are provided' do
let(:weekdays) { %i[mon mon] }
it 'filters them out' do
expect(configuration.weekdays).to eq Set[1]
end
end
context 'when unconfigured' do
subject(:configuration) {
described_class.new do |c|
c.included = included
c.excluded = excluded
end
}
it 'returns the default set of wdays' do
expect(configuration.weekdays).to eq Set[1, 2, 3, 4, 5]
end
end
end
%i[included excluded].each do |dates|
describe "##{dates}" do
it 'returns the configured dates' do
expect(configuration.send(dates)).to eq send(dates).to_set
end
context 'when duplicate dates are provided' do
let(dates) { Array.new(2) { Date.new(2015, 1, 1) } }
it 'filters them out' do
expect(configuration.send(dates)).to eq Set[Date.new(2015, 1, 1)]
end
end
context 'when date-like objects are provided' do
let(dates) { [Date.new(2015, 1, 1), Time.new(2015, 1, 2)] }
it 'converts them to dates' do
expect(configuration.send(dates)).to eq Set[
Date.new(2015, 1, 1),
Date.new(2015, 1, 2)
]
end
end
context 'when un-date-like objects are provided' do
let(dates) { [Date.new(2015, 1, 1), 'date'] }
it 'filters them out' do
expect(configuration.send(dates)).to eq Set[Date.new(2015, 1, 1)]
end
end
context 'when unconfigured' do
subject(:configuration) {
described_class.new do |c| c.weekdays = weekdays end
}
it 'returns an empty set' do
expect(configuration.send(dates)).to eq Set.new
end
end
end
end
end
|
import { tabsTypeAnchor, tabsTypeButton } from './const';
/** tabs union type */
export type TabsType = typeof tabsTypeAnchor | typeof tabsTypeButton;
export interface ITabsItem {
/** tab title */
title: string;
/** optional tab id */
id?: string;
/** optional path for router */
path?: string;
/** active state */
active?: boolean;
/** click callback (used only if tabs are buttons) */
click?: (tab: ITabsItem) => void;
}
|
import MainScreen from './mainScreen/MainScreen';
import SecondScreen from './secondScreen/SecondScreen';
export { MainScreen, SecondScreen };
|
\! echo 'Articles -80 be_read'
USE articles/-80;
SELECT * FROM be_read LIMIT 20;
|
CREATE DATABASE `xtracker`;
USE `xtracker`;
DROP TABLE IF EXISTS `user_accounts`;
CREATE TABLE `user_accounts` (
`accountId` INT(10) NOT NULL,
`email` VARCHAR(100) NOT NULL,
`password` VARCHAR(100) NOT NULL,
`first_name` VARCHAR(50) NOT NULL,
`last_name` VARCHAR(50) NOT NULL,
`user_type` VARCHAR(50) NOT NULL,
`api_key` VARCHAR(100) NOT NULL,
`status` VARCHAR(10) NOT NULL,
PRIMARY KEY (`accountId`),
UNIQUE KEY `accountId` (`accountId`, `email`)
);
|
SET IDENTITY_INSERT [dbo].[Company] ON;
INSERT INTO [dbo].[Company]
(
[CompanyID], [CompanyName], [FloThruFee], [MaxVisibleRatePerHour], [IsHavingFTAlumni]
)
VALUES
(1, 'Company One', 3, 100, 1),
(2, 'Company Two', 3, 100, 0),
(3, 'Company Three', 3, 100, 0),
(4, 'Company One Division 1', 3, 100, 0);
SET IDENTITY_INSERT [dbo].[Company] OFF;
SET IDENTITY_INSERT [dbo].[Company_ParentChildRelationship] ON;
INSERT INTO [dbo].[Company_ParentChildRelationship]
(
[RelationshipID], [ParentID], [ChildID]
)
VALUES
(1, 1, 4);
SET IDENTITY_INSERT [dbo].[Company_ParentChildRelationship] OFF;
SET IDENTITY_INSERT [dbo].[CompanyDomain] ON;
INSERT INTO [dbo].[CompanyDomain]
(
CompanyDomainID, CompanyID, EmailDomain
)
VALUES
(1, 1, 'companyone'),
(2, 2, 'companytwo'),
(3, 3, 'companythree'),
(4, 4, 'division1.companyone');
SET IDENTITY_INSERT [dbo].[CompanyDomain] OFF;
SET IDENTITY_INSERT [dbo].[CompanyProject] ON;
INSERT INTO [dbo].[CompanyProject]
(
CompanyProjectID,
ProjectID,
Description,
CompanyID
)
VALUES
(1, '', 'Project One', 1),
(2, '', 'Project Two', 3),
(3, '', 'General Project', 3),
(4, '', 'Operations Mobile Field Inspection', 1);
SET IDENTITY_INSERT [dbo].[CompanyProject] OFF;
SET IDENTITY_INSERT [dbo].[ContractProjectPO] ON;
INSERT INTO [dbo].[ContractProjectPO]
(
ContractProjectPOID,
CompanyProjectID,
AgreementID,
InActive,
IsGeneralProjectPO,
InactiveForUser,
verticalid
)
VALUES
(1, 1, 1, 0, 0, 0, 4),
(2, 2, 1, 0, 0, 0, 4),
(3, 3, 1, 0, 0, 0, 4);
SET IDENTITY_INSERT [dbo].[ContractProjectPO] OFF;
SET IDENTITY_INSERT [dbo].[CompanyPO] ON;
INSERT INTO [dbo].[CompanyPO]
(
CompanyPOID,
PONumber,
Description,
CompanyID,
InActive,
CreateDate,
CreateUserID,
verticalid
)
VALUES
(1, 'PO Number', 'Description', 1, 0, '2013-12-11', 1, 4),
(2, 'PO Number', 'Description', 2, 0, '2013-12-11', 2, 4),
(3, 'PO Number', 'Description', 3, 0, '2013-12-11', 3, 4);
SET IDENTITY_INSERT [dbo].[CompanyPO] OFF;
SET IDENTITY_INSERT [dbo].[CompanyPOProjectMatrix] ON;
INSERT INTO [dbo].[CompanyPOProjectMatrix]
(
CompanyPOProjectMatrixID,
CompanyPOID,
CompanyProjectID,
InActive,
verticalid
)
VALUES
(1, 1, 1, 0, 4),
(2, 2, 2, 0, 4),
(3, 3, 3, 0, 4);
SET IDENTITY_INSERT [dbo].[CompanyPOProjectMatrix] OFF;
|
package com.android.sipdemo.listener
/**
* Created by zhoujian on 2017/3/20.
*/
interface OnStateChangedListener {
    fun onChanged(state: Int, o: Any?)
}
|
import { Injectable } from '@angular/core';
import { Router, Resolve, RouterStateSnapshot,
ActivatedRouteSnapshot } from '@angular/router';
import { HomeService } from '../../services';
import { Home } from '../../models';
@Injectable()
export class HomeResolver implements Resolve<Home> {
constructor(private homeService: HomeService, private router: Router) {}
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Promise<Home> {
return this.homeService.list().map(homes => homes[0]).toPromise();
}
}
|
export type Magical<T> = {
/**
* Summon the up-to-date magical state.
*/
accio: () => T,
/**
* Extend an extendable ear into the Magic, and you'll hear when changes happen.
* @param spell The spell to perform when the extendable ear hears changes.
   * @param options Optional eagerness setting; with the default 'eager' the spell also runs once on registration.
   */
  extendEar: (spell: Spell<T>, options?: { eagerness: 'eager' | 'lazy' }) => number,
/**
* Perform a memory charm on your magical state. Erase the old memory, and replace it with a new one.
* @param memory The new memory that replaces the old.
*/
obliviate: (newMemory: T) => void,
}
export type Spell<T> = (obliviatedMagical: T, previousMagical: T | undefined) => void
/**
* Create some magical state.
* @param initialState The initial state of the magical state.
*/
export const useMagical = <T>(initial: T): Magical<T> => {
// `magical` is the state. The state is magical.
let magical: T = initial
const accio: Magical<T>['accio'] = () => magical
const extendEar: Magical<T>['extendEar'] = (spell, options = { eagerness: 'eager' }) => {
if (options.eagerness === 'eager') spell(magical, undefined)
return extendableEars.push(spell)
}
const extendableEars: Parameters<Magical<T>['extendEar']>[0][] = []
const obliviate: Magical<T>['obliviate'] = newMemory => {
const previousMagical = magical
magical = newMemory
for (const ear of extendableEars) {
ear(magical, previousMagical)
}
}
return {
accio,
extendEar,
obliviate,
}
}
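// A minimal usage sketch of useMagical (hypothetical values, not part of the
// original module). With the default eager option the spell also fires once
// on registration, receiving `undefined` as the previous value:
//
//   const counter = useMagical(0)
//   counter.extendEar((next, prev) => console.log(`from ${prev} to ${next}`))
//   counter.obliviate(1)  // logs: "from 0 to 1"
//   counter.accio()       // => 1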
/**
* Extend the same extendable ear into multiple instances of Magic.
* @param spell A special kind of spell, performed on multiple pieces of current and previous magical state.
*/
export const useExtendableEar = (
spell: (indexOfObliviated: number, obliviatedMagicals: any[], previousMagicals: any[]) => void,
...magicals: Magical<any>[]
): number => {
for (let i = 0; i < magicals.length; i++) {
magicals[i].extendEar((obliviatedMagical, previousMagical) => {
if (obliviatedMagical === previousMagical) return
spell(
i,
magicals.map(magical => magical.accio()),
[
...magicals.slice(0, i).map(magical => magical.accio()),
previousMagical,
...magicals.slice(i + 1).map(magical => magical.accio()),
]
)
})
}
return magicals.length
}
export type Conjuror = {
accio: () => Element,
conjure: (tag: string) => Conjuror,
conjured: Conjuror[]
}
export function useConjuror (id?: string) {
const factory = (element: Element): Conjuror => {
const accio: Conjuror['accio'] = () => element
const conjure: Conjuror['conjure'] = tag => {
const el = document.createElement(tag)
const c = factory(el)
conjured.push(c)
element.appendChild(el)
return c
}
const conjured: Conjuror['conjured'] = []
return {
accio,
conjure,
conjured,
}
}
return factory(document.getElementById(id))
}
|
/**
 *
 * @Copyright: Copyright (c) 2017-2018 ********* Company, Technology Development Department
 * @author: Author name (usually the pinyin of the name)
 * @E-mail: email address
 * @version: 1.0
 * @created: 2019-09-21 23:21:52
 * @ClassName GoodsController
 * @Description: TODO (describe in one sentence what this class does)
 * @Change log:
 * @version: 1.0
 */
package controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
public class GoodsController {
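    // Example (hypothetical request, not part of the original source):
    //   GET /addGoods?goodsname=Pen&goodsprice=2.5&goodsnumber=4
    // This would print 10.0 to stdout and resolve the "success" view.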
@RequestMapping("/addGoods")
public String add(String goodsname, double goodsprice, int goodsnumber) {
double total = goodsprice * goodsnumber;
System.out.println(total);
return "success";
}
}
|
-- file:plpgsql.sql ln:4597 expect:true
CREATE FUNCTION transition_table_level1_ri_parent_upd_func()
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
DECLARE
x int
|
def cons(x, y)
->(m) { m.call(x, y) }
end
def car(z)
z.call(->(p, q) { p })
end
def cdr(z)
z.call(->(p, q) { q })
end
puts car(cons(3, 10))
puts cdr(cons(3, 10))
|
// To parse this JSON data, do
// final cast = castFromJson(jsonString);
import 'dart:convert';
class Cast {
final int castId;
final String character;
final String creditId;
final int gender;
final int id;
final String name;
final int order;
final String profilePath;
Cast({
this.castId,
this.character,
this.creditId,
this.gender,
this.id,
this.name,
this.order,
this.profilePath,
});
factory Cast.fromRawJson(String str) => Cast.fromJson(json.decode(str));
String toRawJson() => json.encode(toJson());
factory Cast.fromJson(Map<String, dynamic> json) => Cast(
castId: json["cast_id"],
character: json["character"],
creditId: json["credit_id"],
gender: json["gender"],
id: json["id"],
name: json["name"],
order: json["order"],
profilePath: json["profile_path"],
);
Map<String, dynamic> toJson() => {
"cast_id": castId,
"character": character,
"credit_id": creditId,
"gender": gender,
"id": id,
"name": name,
"order": order,
"profile_path": profilePath,
};
}
|
---
title: Mere
letter: M
permalink: "/definitions/bld-mere-3.html"
body: L. Fr. Mother. Aile, mere, fille, grandmother, mother, daughter. Britt. c. 89.
En ventre sa mere, in its mother's womb
published_at: '2018-07-07'
source: Black's Law Dictionary 2nd Ed (1910)
layout: post
---
|
# frozen_string_literal: true
require "much-rails-pub-sub"
require "much-rails-pub-sub/event"
class MuchRailsPubSub::Publisher
include MuchRails::CallMethod
attr_reader :event
def initialize(event_name, event_params:)
@event = MuchRailsPubSub::Event.new(event_name, params: event_params)
end
def on_call
raise NotImplementedError
end
def event_id
event.id
end
def event_name
event.name
end
def event_params
event.params
end
private
def publish_params
{
"event_id" => event_id,
"event_name" => event_name,
"event_params" => event_params,
}
end
end
|
package com.twitter.scrooge.backend
import com.twitter.scrooge.testutil.Spec
import com.twitter.scrooge.{HasThriftStructCodec3, ThriftStructCodec3}
import thrift.test._
class HasThriftStructCodec3Spec extends Spec {
"All ThriftStructs" should {
"have a codec method via HasThriftStructCodec3" in {
val struct = RequiredString("yo")
struct.isInstanceOf[HasThriftStructCodec3[RequiredString]] must be(true)
struct._codec.isInstanceOf[ThriftStructCodec3[RequiredString]] must be(true)
val e = Xception(10, "yo")
e.isInstanceOf[HasThriftStructCodec3[Xception]] must be(true)
e._codec.isInstanceOf[ThriftStructCodec3[Xception]] must be(true)
val union = EnumUnion.Text("yo")
union.isInstanceOf[HasThriftStructCodec3[EnumUnion]] must be(true)
union._codec.isInstanceOf[ThriftStructCodec3[EnumUnion]] must be(true)
}
}
}
|
# frozen_string_literal: true
require 'jiji/configurations/mongoid_configuration'
require 'jiji/utils/pagenation'
module Jiji::Model::Logging
class Log
include Enumerable
include Jiji::Utils::Pagenation
def initialize(time_source, backtest_id = nil)
@backtest_id = backtest_id
@time_source = time_source
end
def get(index)
query = Query.new(filter, { timestamp: :asc }, index, 1)
data = query.execute(LogData)
return @current if @current && data.length == index
data[0]
end
def count
count = LogData.where(filter).count
@current ? count + 1 : count
end
def write(message)
@current ||= create_log_data
@current << message
shift if @current.full?
end
def close
save_current_log_data
end
private
def shift
save_current_log_data
@current = create_log_data
end
def create_log_data
LogData.create(@time_source.now, nil, @backtest_id)
end
def save_current_log_data
@current&.save
@current = nil
end
def filter
{ backtest_id: @backtest_id }
end
end
end
|
/*
Navicat MySQL Data Transfer
Source Server : localhost
Source Server Version : 50536
Source Host : localhost:3306
Source Database : absence
Target Server Type : MYSQL
Target Server Version : 50536
File Encoding : 65001
Date: 2016-07-06 20:13:12
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for `course`
-- ----------------------------
DROP TABLE IF EXISTS `course`;
CREATE TABLE `course` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`cGrade` varchar(5) DEFAULT NULL,
`cName` varchar(20) DEFAULT NULL,
`tID` varchar(13) DEFAULT NULL,
`cClass` varchar(30) DEFAULT NULL,
`aID` varchar(13) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=15 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of course
-- ----------------------------
INSERT INTO course VALUES ('2', '2014', '数据结构', '1', '14-2', '10120070');
INSERT INTO course VALUES ('4', '2013', 'C#', '1', '13-2', '10120070');
INSERT INTO course VALUES ('9', '2016', '项目实训', '1', '16-3', '10120070');
INSERT INTO course VALUES ('12', '2017', 'Java', '1', '17-2', '10120070');
INSERT INTO course VALUES ('13', '2017', 'JavaWeb', '1', '17-5', '10120070');
INSERT INTO course VALUES ('14', '2018', 'JavaWeb', '1', '18-3', '10120070');
-- ----------------------------
-- Table structure for `present`
-- ----------------------------
DROP TABLE IF EXISTS `present`;
CREATE TABLE `present` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`sID` varchar(13) DEFAULT NULL,
`sName` varchar(20) DEFAULT NULL,
`sDelDate` date DEFAULT NULL,
`sAddTimes` int(11) DEFAULT NULL,
`sCourse` varchar(255) DEFAULT NULL,
`sTeacher` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=29 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of present
-- ----------------------------
INSERT INTO present VALUES ('5', '1427152083', '杨森源', '2016-07-12', null, '2', '1');
INSERT INTO present VALUES ('6', '1427152073', '李贤', '2016-07-12', null, '4', '1');
INSERT INTO present VALUES ('23', '1427152073', '李贤', '2016-07-07', null, '9', '1');
INSERT INTO present VALUES ('24', '1427152073', '李贤', '2016-07-06', null, '2', '1');
INSERT INTO present VALUES ('25', '1427152073', '李贤', '2016-07-06', null, '2', '1');
-- ----------------------------
-- Table structure for `student`
-- ----------------------------
DROP TABLE IF EXISTS `student`;
CREATE TABLE `student` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`sID` varchar(13) DEFAULT NULL,
`sName` varchar(20) DEFAULT NULL,
`sCourse` varchar(30) DEFAULT NULL,
`sAssistant` varchar(13) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of student
-- ----------------------------
INSERT INTO student VALUES ('8', '1427152073', '李贤', '4', '10120070');
INSERT INTO student VALUES ('9', '1427152083', '杨森源', '4', '10120070');
INSERT INTO student VALUES ('10', '1427152073', '李贤', '4', '10120070');
INSERT INTO student VALUES ('11', '1427152083', '杨森源', '4', '10120070');
INSERT INTO student VALUES ('12', '1427152073', '李贤', '4', '10120070');
INSERT INTO student VALUES ('13', '1427152083', '杨森源', '4', '10120070');
INSERT INTO student VALUES ('16', '1427152073', '李贤', '2', '10120070');
INSERT INTO student VALUES ('17', '1427152083', '杨森源', '2', '10120070');
INSERT INTO student VALUES ('20', '1427152073', '李贤', '9', '10120070');
INSERT INTO student VALUES ('21', '1427152083', '杨森源', '9', '10120070');
INSERT INTO student VALUES ('22', '1427152073', '李贤', '12', '10120070');
INSERT INTO student VALUES ('23', '1427152083', '杨森源', '12', '10120070');
INSERT INTO student VALUES ('24', '1427152073', '李贤', '13', '10120070');
INSERT INTO student VALUES ('25', '1427152083', '杨森源', '13', '10120070');
INSERT INTO student VALUES ('26', '1427152073', '李贤', '14', '10120070');
INSERT INTO student VALUES ('27', '1427152083', '杨森源', '14', '10120070');
-- ----------------------------
-- Table structure for `tuser`
-- ----------------------------
DROP TABLE IF EXISTS `tuser`;
CREATE TABLE `tuser` (
`tID` varchar(255) NOT NULL,
`tName` varchar(50) DEFAULT NULL,
`tPassword` varchar(80) DEFAULT NULL,
`tIsAssistant` int(2) DEFAULT '0',
PRIMARY KEY (`tID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of tuser
-- ----------------------------
INSERT INTO tuser VALUES ('1', '李贤', 'e10adc3949ba59abbe56e057f20f883e', '0');
INSERT INTO tuser VALUES ('10120070', '姚莉', '123456', '1');
|
<?php
namespace Home\Model;
use Think\Model;
class WayModel extends Model{
//所有互动的经验
public function allway(){
$way = M('way');
$data = $way->order('wid desc')->select();
return $data;
}
//添加经验的业务数据模型
public function addway($wtitle,$wcontent,$wuserid,$wdate){
$way = M('way');
$data = array(
'wtitle'=> "$wtitle",
'wcontent'=> "$wcontent",
'wuserid' => "$wuserid",
'wdate' => "$wdate",
);
return $way->add($data);
}
}
|
RSpec.describe Mas::Cms::Connection do
subject(:connection) { described_class.new(cache) }
let(:cache) { spy(:cache) }
describe '.new' do
let(:config) { Mas::Cms::Client.config }
let(:options) do
{
url: config.host,
request: {
timeout: config.timeout,
open_timeout: config.open_timeout
}
}
end
before { allow(Faraday).to receive(:new).with(options) }
it 'builds a connection' do
expect(Faraday).to receive(:new).with(options)
Mas::Cms::Connection.new
end
it 'accepts a cache object' do
conn = Mas::Cms::Connection.new(cache)
expect(conn.cache).to eq(cache)
end
end
describe '.get' do
let(:path) { '/test/me.json' }
let(:response) { double(status: status, headers: {}, body: {}) }
let(:status) { 200 }
context 'when sending params' do
let(:params) { { document_type: 'Insight' } }
it 'delegates to raw_connection with params' do
expect(connection.raw_connection).to receive(:get).with(
path,
params
).and_return(response)
connection.get(path, params: params, cached: false)
end
end
context 'when successful and not cached' do
it 'delegates to raw_connection' do
expect(connection.raw_connection).to receive(:get).with(path, nil).and_return(response)
connection.get(path, cached: false)
end
end
context 'when successful and cached' do
it 'calls cache object' do
expect(connection.cache).to receive(:fetch).and_return(response)
connection.get(path, cached: true)
end
end
context 'when response is a redirection' do
before { allow(connection.raw_connection).to receive(:get).and_return(response) }
let(:status) { 301 }
it 'raises a `HttpRedirect` instance for http 301 status' do
expect { connection.get(path) }.to raise_exception Mas::Cms::HttpRedirect
end
end
context 'when response body is null' do
let(:response) { double(status: status, headers: {}, body: nil) }
before { allow(connection.raw_connection).to receive(:get).and_return(response) }
      it 'raises a Mas::Cms::Errors::ResourceNotFound error' do
expect { connection.get(path) }.to raise_error(Mas::Cms::Errors::ResourceNotFound)
end
end
context 'when resource not found' do
before do
allow(connection.raw_connection)
.to receive(:get)
.with(path, nil)
.and_raise(Faraday::Error::ResourceNotFound, 'foo')
end
      it 'raises a Mas::Cms::Errors::ResourceNotFound error' do
expect { connection.get(path) }.to raise_error(Mas::Cms::Errors::ResourceNotFound)
end
end
context 'when connection failed' do
before do
allow(connection.raw_connection)
.to receive(:get)
.with(path, nil)
.and_raise(Faraday::Error::ConnectionFailed, 'foo')
end
      it 'raises a Mas::Cms::Errors::ConnectionFailed error' do
expect { connection.get(path) }.to raise_error(Mas::Cms::Errors::ConnectionFailed)
end
end
context 'when client error' do
before do
allow(connection.raw_connection)
.to receive(:get)
.with(path, nil)
.and_raise(Faraday::Error::ClientError.new('foo', status: 500))
end
      it 'raises a Mas::Cms::Errors::ClientError error' do
expect { connection.get(path) }.to raise_error(Mas::Cms::Errors::ClientError)
end
end
end
describe '.post' do
let(:params) { '/test/me.json' }
    context 'when successful' do
it 'delegates to raw_connection' do
expect(connection.raw_connection).to receive(:post).with(params)
connection.post(params)
end
end
context 'when connection failed' do
before do
allow(connection.raw_connection)
.to receive(:post)
.with(params)
.and_raise(Faraday::Error::ConnectionFailed, 'foo')
end
      it 'raises a Mas::Cms::Errors::ConnectionFailed error' do
expect { connection.post(params) }.to raise_error(Mas::Cms::Errors::ConnectionFailed)
end
end
context 'when ssl error' do
let(:faraday_exception) do
Faraday::SSLError.new('SSL_connect returned=1 errno=0')
end
before do
allow(connection.raw_connection)
.to receive(:post)
.with(params)
.and_raise(faraday_exception)
end
      it 'raises a Mas::Cms::Errors::ClientError error' do
expect { connection.post(params) }.to raise_error(
Mas::Cms::Errors::ClientError, 'SSL_connect returned=1 errno=0'
)
end
end
context 'when client error' do
before do
allow(connection.raw_connection)
.to receive(:post)
.with(params)
.and_raise(Faraday::Error::ClientError.new('foo', status: 500))
end
      it 'raises a Mas::Cms::Errors::ClientError error' do
expect { connection.post(params) }.to raise_error(Mas::Cms::Errors::ClientError)
end
end
context 'when unprocessable entity' do
before do
allow(connection.raw_connection)
.to receive(:post)
.with(params)
.and_raise(Faraday::Error::ClientError.new('foo', status: 422))
end
      it 'raises a Mas::Cms::Errors::UnprocessableEntity error' do
expect { connection.post(params) }.to raise_error(Mas::Cms::Errors::UnprocessableEntity)
end
end
end
  describe '.patch' do
let(:params) { '/test/me.json' }
    context 'when successful' do
it 'delegates to raw_connection' do
expect(connection.raw_connection).to receive(:patch).with(params)
connection.patch(params)
end
end
context 'when unprocessable entity' do
before do
allow(connection.raw_connection)
.to receive(:patch)
.with(params)
.and_raise(Faraday::Error::ClientError.new('foo', status: 422))
end
      it 'raises a Mas::Cms::Errors::UnprocessableEntity error' do
expect { connection.patch(params) }.to raise_error(Mas::Cms::Errors::UnprocessableEntity)
end
end
end
end
|
tinyMCE.addI18n("cs.coffelli_contextmenu",{
coffelli_contextmenu_insertpbefore_desc:"Vlož odstavec PŘED aktuální element",
coffelli_contextmenu_insertpafter_desc:"Vlož odstavec ZA aktuální element",
coffelli_contextmenu_insertpbeforeroot_desc:"Vlož odstavec PŘED aktuální KOŘENOVÝ element",
coffelli_contextmenu_insertpafterroot_desc:"Vlož odstavec ZA aktuální KOŘENOVÝ element",
coffelli_contextmenu_delete_desc:"Smazat aktuální element",
coffelli_contextmenu_deleteroot_desc:"Smazat aktuální KOŘENOVÝ element",
coffelli_contextmenu_moveup_desc:"Posunout element NAHORU",
coffelli_contextmenu_moveuproot_desc:"Posunout aktuální kořenový element NAHORU",
P_coffelli_contextmenu_insertpbefore_desc:"Vlož odstavec PŘED aktuální element",
P_coffelli_contextmenu_insertpafter_desc:"Vlož odstavec ZA aktuální element",
P_coffelli_contextmenu_insertpbeforeroot_desc:"Vlož odstavec PŘED aktuální KOŘENOVÝ element",
P_coffelli_contextmenu_insertpafterroot_desc:"Vlož odstavec ZA aktuální KOŘENOVÝ element",
P_coffelli_contextmenu_delete_desc:"Smazat aktuální element",
P_coffelli_contextmenu_deleteroot_desc:"Smazat aktuální KOŘENOVÝ element",
P_coffelli_contextmenu_moveup_desc:"Posunout element NAHORU",
P_coffelli_contextmenu_moveuproot_desc:"Posunout aktuální kořenový element NAHORU",
});
|
package com.hector.engine.graphics;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL20;
import org.lwjgl.opengl.GL30;
import org.lwjgl.opengl.GL43;
public class FrameBuffer {
private int width, height;
private int id;
public FrameBuffer(int width, int height) {
this.width = width;
this.height = height;
createTexture();
}
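    // Usage sketch (hypothetical, not part of the original source):
    //   FrameBuffer fb = new FrameBuffer(1280, 720);
    //   fb.bind();              // work against the backing RGBA32F texture
    //   fb.unbind();
    //   fb.resize(1920, 1080);  // destroys and recreates the texture storage
    //   fb.destroy();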
private void createTexture() {
this.id = GL20.glGenTextures();
bind();
{
GL43.glTexStorage2D(GL11.GL_TEXTURE_2D, 1, GL30.GL_RGBA32F, width, height);
}
unbind();
}
public void resize(int width, int height) {
this.width = width;
this.height = height;
destroy();
createTexture();
}
public void bind() {
GL43.glBindTexture(GL11.GL_TEXTURE_2D, id);
}
public void unbind() {
GL43.glBindTexture(GL11.GL_TEXTURE_2D, 0);
}
public void destroy() {
GL11.glDeleteTextures(id);
}
public int getWidth() {
return width;
}
public int getHeight() {
return height;
}
public int getId() {
return id;
}
}
|
package it.carminepat.systeminfo;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
*
* @author carminepat
*/
public class TestInformation {
/**
* @param args the command line arguments
*/
public static void main(String[] args) throws Exception {
ObjectMapper mapper = new ObjectMapper();
String jsonOs = mapper.writeValueAsString(Os.i());
System.out.println(jsonOs);
// String jsonCpu=mapper.writeValueAsString(Cpu.i());
// System.out.println(jsonCpu);
// String jsonMemory=mapper.writeValueAsString(Memory.i());
// System.out.println(jsonMemory);
// String jsonDisk=mapper.writeValueAsString(Disk.i());
// System.out.println(jsonDisk);
// String jsonProcess = mapper.writeValueAsString(Process.i().getListOfFirstFiveProcess());
// System.out.println(jsonProcess);
// String jsonServices = mapper.writeValueAsString(Services.i().getListOfServices());
// System.out.println(jsonServices);
//Os.i().setEnvironmentVariable("UTILS_PUBLISYS", "C:\\progetti\\aziendali\\publisys");
// System.out.println(mapper.writeValueAsString(Program.i().getPrograms()));
// System.out.println(mapper.writeValueAsString(Net.i()));
}
}
|
<h1 align="center">
  Portfolio built with Gatsby.
</h1>
<p align="center">
See live at
<a href="http://handcraftedweb.dev/">
http://handcraftedweb.dev/
</a>
</p>
|
/* vim: set sw=8 ts=8 sts=8 expandtab: */
#include "ewl_base.h"
#include "ewl_icon_theme.h"
#include "ewl_macros.h"
#include "ewl_private.h"
#include "ewl_debug.h"
#ifdef BUILD_EFREET_SUPPORT
# include <Efreet.h>
#endif
static int ewl_icon_theme_is_edje = 0;
static Ecore_Hash *ewl_icon_theme_cache = NULL;
static Ecore_Hash *ewl_icon_fallback_theme_cache = NULL;
static void ewl_icon_theme_cb_free(void *data);
static const char *ewl_icon_theme_icon_path_get_helper(const char *icon,
unsigned int size, const char *theme,
const char *key, Ecore_Hash *cache);
/**
* @return Returns TRUE on success or FALSE on failure
* @brief Initializes the icon theme system
*/
int
ewl_icon_theme_init(void)
{
DENTER_FUNCTION(DLEVEL_STABLE);
if (!ewl_icon_theme_cache)
{
ewl_icon_theme_cache = ecore_hash_new(ecore_str_hash, ecore_str_compare);
ecore_hash_free_key_cb_set(ewl_icon_theme_cache, ewl_icon_theme_cb_free);
ecore_hash_free_value_cb_set(ewl_icon_theme_cache, free);
ewl_icon_fallback_theme_cache = ecore_hash_new(
ecore_str_hash, ecore_str_compare);
ecore_hash_free_key_cb_set(ewl_icon_fallback_theme_cache,
ewl_icon_theme_cb_free);
ecore_hash_free_value_cb_set(ewl_icon_fallback_theme_cache,
free);
}
DRETURN_INT(TRUE, DLEVEL_STABLE);
}
/**
* @return Returns no value.
* @brief Shuts down the icon theme system
*/
void
ewl_icon_theme_shutdown(void)
{
DENTER_FUNCTION(DLEVEL_STABLE);
IF_FREE_HASH(ewl_icon_theme_cache);
IF_FREE_HASH(ewl_icon_fallback_theme_cache);
DLEAVE_FUNCTION(DLEVEL_STABLE);
}
/**
* @return Returns no value
* @brief Called when the icon theme is changed so we can clean up any
* caching we have in place
*/
void
ewl_icon_theme_theme_change(void)
{
const char *icon_theme;
DENTER_FUNCTION(DLEVEL_STABLE);
icon_theme = ewl_config_string_get(ewl_config, EWL_CONFIG_THEME_ICON_THEME);
/* check if this is an edje theme */
if (icon_theme && (!strncasecmp(icon_theme + (strlen(icon_theme) - 4),
".edj", 4)))
ewl_icon_theme_is_edje = 1;
else
ewl_icon_theme_is_edje = 0;
/* destroy the cache and re-create it */
IF_FREE_HASH(ewl_icon_theme_cache);
ewl_icon_theme_cache = ecore_hash_new(ecore_str_hash, ecore_str_compare);
ecore_hash_free_key_cb_set(ewl_icon_theme_cache, ewl_icon_theme_cb_free);
ecore_hash_free_value_cb_set(ewl_icon_theme_cache, free);
DLEAVE_FUNCTION(DLEVEL_STABLE);
}
/**
* @param icon: The Icon Spec icon name to lookup
* @param size: The size of the icon to retrieve. A 0 value will cause
* the default size to be used.
* @return Returns the path to the icon we are looking for or NULL if none found
 * @brief Retrieves the full path to the specified icon, or NULL if none found
*/
const char *
ewl_icon_theme_icon_path_get(const char *icon, int size)
{
const char *ret;
const char *icon_theme;
char key[256];
DENTER_FUNCTION(DLEVEL_STABLE);
DCHECK_PARAM_PTR_RET(icon, NULL);
icon_theme = ewl_config_string_get(ewl_config,
EWL_CONFIG_THEME_ICON_THEME);
/* make sure we have an icon theme */
if (!icon_theme)
DRETURN_PTR(NULL, DLEVEL_STABLE);
/* if our theme is an edje just return the .edj file */
if (ewl_icon_theme_is_edje)
DRETURN_PTR(icon_theme, DLEVEL_STABLE);
if (size == 0)
size = ewl_config_int_get(ewl_config,
EWL_CONFIG_THEME_ICON_SIZE);
snprintf(key, sizeof(key), "%s@%d", icon, size);
ret = ewl_icon_theme_icon_path_get_helper(icon, size, icon_theme,
key, ewl_icon_theme_cache);
if (ret == EWL_THEME_KEY_NOMATCH)
ret = ewl_icon_theme_icon_path_get_helper(icon, size, "EWL",
key, ewl_icon_fallback_theme_cache);
if (ret == EWL_THEME_KEY_NOMATCH)
ret = NULL;
DRETURN_PTR(ret, DLEVEL_STABLE);
}
static const char *
ewl_icon_theme_icon_path_get_helper(const char *icon, unsigned int size,
const char *theme, const char *key,
Ecore_Hash *cache)
{
char *ret;
DENTER_FUNCTION(DLEVEL_STABLE);
DCHECK_PARAM_PTR_RET(icon, EWL_THEME_KEY_NOMATCH);
#ifdef BUILD_EFREET_SUPPORT
ret = ecore_hash_get(cache, key);
if (!ret)
{
/* XXX: How to store NOMATCH in the cache? The cache is strings which must be free'd */
ret = efreet_icon_path_find(theme, icon, size);
if (!ret) ret = EWL_THEME_KEY_NOMATCH;
else ecore_hash_set(cache, strdup(key), (void *)ret);
}
#else
ret = EWL_THEME_KEY_NOMATCH;
#endif
DRETURN_PTR(ret, DLEVEL_STABLE);
}
static void
ewl_icon_theme_cb_free(void *data)
{
DENTER_FUNCTION(DLEVEL_STABLE);
if (data == EWL_THEME_KEY_NOMATCH)
DRETURN(DLEVEL_STABLE);
IF_FREE(data);
DLEAVE_FUNCTION(DLEVEL_STABLE);
}
|
import argparse
import sys
from utils import get_reverse_complement, read_from_file
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'-f', '--fn_input',
help='input file, can be .txt or .fasta'
)
args = parser.parse_args()
return args
def process_seqs_for_grep(list_seqs):
list_rc = []
for seq in list_seqs:
list_rc.append(get_reverse_complement(seq))
set_all = set(list_seqs).union(set(list_rc))
return set_all
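# Example (hypothetical input, assuming get_reverse_complement implements the
# standard DNA reverse complement): for ['AACG'] the set contains 'AACG' and
# 'CGTT', and the pattern printed below would be AACG\|CGTT (order not
# guaranteed), ready for use as a grep alternation.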
if __name__ == '__main__':
args = parse_args()
fn_input = args.fn_input
list_seqs = read_from_file(fn_input)
sys.stderr.write('Length of list {0} = {1}\n'.format(fn_input, len(list_seqs)))
set_all = process_seqs_for_grep(list_seqs)
sys.stderr.write('Size of set for forward/reverse seqs = {}\n'.format(len(set_all)))
    print(r'\|'.join(set_all))
|
@extends('emails.master')
@section('content')
<div>
<p>
@lang('emails.hi')<br/>
</p>
<p>
@lang('emails.design.congrates')
@if (isset($design->design_name))
<b>{{ $design->design_name }}</b> /
@endif
@lang('emails.design.with.code')
<b>{{$design->code}}</b>
{{-- @if (isset($design->request->custom_id))
(<a href="{{ url('/design/' . $design->code) }}">{{ $design->request->custom_id }}</a>)
@endif --}}
@lang('emails.design.congrates.2')
<br/>
<br/>
<img src="{{ url('/api/v1/image/thumbnail/design/' . $design->code) }}" width="200" height="200">
<br/>
</p>
<p>
@lang('emails.design.overview.message')
</p>
@include('emails.ending')
</div>
@endsection
|
import { BooleanFilter } from './BooleanFilter'
import { IdFilter } from './IdFilter'
import { KeywordFilter } from './KeywordFilter'
import { OrderFilter } from './OrderFilter'
import { PageFilter } from './PageFilter'
import { PageSizeFilter } from './PageSizeFilter'
import { SelectFilter } from './SelectFilter'
import { TypeFilter } from './TypeFilter'
export const filters = [
new IdFilter(),
new TypeFilter(),
new PageFilter(),
new PageSizeFilter(),
new KeywordFilter(),
new OrderFilter(),
new BooleanFilter(),
new SelectFilter()
]
|
package com.open.capacity.security.controller;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import com.open.capacity.security.annotation.LogAnnotation;
import com.open.capacity.security.dao.PermissionDao;
import com.open.capacity.security.dao.ServerDao;
import com.open.capacity.security.dto.LoginUser;
import com.open.capacity.security.model.Permission;
import com.open.capacity.security.model.SysServer;
import com.open.capacity.security.service.PermissionService;
import com.open.capacity.security.utils.UserUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* @author wjh
* @create 2018-05-07 11:53
 * @desc Service management
**/
@Api(tags = "服务管理")
@RestController
@RequestMapping("/sys/server")
public class ServerController {
@Autowired
private ServerDao serverDao;
/**
 * Service list
*
* @param pId
* @param serversAll
* @param list
*/
private void setServerList(Long pId, List<SysServer> serversAll, List<SysServer> list) {
for (SysServer per : serversAll) {
if (per.getParentId().equals(pId)) {
list.add(per);
                if (serversAll.stream().filter(p -> p.getParentId().equals(per.getId())).findAny().isPresent()) {
setServerList(per.getId(), serversAll, list);
}
}
}
}
/**
 * Gets the service list for the given module ID
* @return
*/
@PostMapping("/list")
@ApiOperation(value = "根据模块ID获取服务列表")
@PreAuthorize("hasAuthority('sys:server:query')")
public List<SysServer> serversList(Long moduleId) {
List<SysServer> serversAll = serverDao.listAll(moduleId);
List<SysServer> list = Lists.newArrayList();
setServerList(0L, serversAll, list);
return list;
}
@PostMapping("/all")
@ApiOperation(value = "根据模块ID获取所有服务")
@PreAuthorize("hasAuthority('sys:server:query')")
public JSONArray serversAll(Long moduleId) {
List<SysServer> serversAll = serverDao.listAll(moduleId);
JSONArray array = new JSONArray();
setServersTree(0L, serversAll, array);
return array;
}
/**
 * Service tree
*
* @param pId
* @param serversAll
* @param array
*/
private void setServersTree(Long pId, List<SysServer> serversAll, JSONArray array) {
for (SysServer per : serversAll) {
if (per.getParentId().equals(pId)) {
String string = JSONObject.toJSONString(per);
JSONObject parent = (JSONObject) JSONObject.parse(string);
array.add(parent);
                if (serversAll.stream().filter(p -> p.getParentId().equals(per.getId())).findAny().isPresent()) {
JSONArray child = new JSONArray();
parent.put("child", child);
setServersTree(per.getId(), serversAll, child);
}
}
}
}
@LogAnnotation
@PostMapping
@ApiOperation(value = "保存服务")
@PreAuthorize("hasAuthority('sys:server:add')")
public void save(@RequestBody SysServer sysServer) {
serverDao.save(sysServer);
}
@GetMapping("/{id}")
@ApiOperation(value = "根据服务id获取服务")
@PreAuthorize("hasAuthority('sys:server:query')")
public SysServer get(@PathVariable Long id) {
return serverDao.getById(id);
}
@LogAnnotation
@PutMapping
@ApiOperation(value = "修改服务")
@PreAuthorize("hasAuthority('sys:server:add')")
public void update(@RequestBody SysServer sysServer) {
serverDao.update(sysServer);
}
@LogAnnotation
@DeleteMapping("/{id}")
@ApiOperation(value = "删除服务")
@PreAuthorize("hasAuthority('sys:server:del')")
public void delete(@PathVariable Long id) {
serverDao.delete(id);
}
}
|
package com.github.mdr.mash.ns.git
import com.github.mdr.mash.functions.{ BoundParams, MashFunction, ParameterModel }
import com.github.mdr.mash.runtime.MashList
import scala.collection.JavaConverters._
object PullFunction extends MashFunction("git.pull") {
val params = ParameterModel.Empty
def call(boundParams: BoundParams): MashList = {
GitHelper.withGit { git ⇒
val pullResult = git.pull.call()
val fetchResult = pullResult.getFetchResult
val updates = fetchResult.getTrackingRefUpdates.asScala.toSeq
MashList(updates.map(FetchFunction.asMashObject))
}
}
override def typeInferenceStrategy = Seq(FetchBranchUpdateClass)
override def summaryOpt = Some("Fetch from and integrate with another repository or a local branch")
}
|
# Just a tiny component to standardize how we display related_urls, a link
# with a little external link icon.
#
# Takes a String URL
#
# <%= render ExternalLinkComponent.new("https://example.com/foo/bar") %>
#
class ExternalLinkComponent < ApplicationComponent
attr_reader :url
def initialize(url)
@url = url
end
def call
link_to("<i class='fa fa-external-link'></i> ".html_safe + abbreviated_value(url), url, target: "_blank")
end
private
# Just the hostname
def abbreviated_value(v)
v =~ %r{https?\://([^/]+)}
"#{$1}/…"
end
end
|
require 'routemaster/dirty/state'
require 'routemaster/config'
module Routemaster
module Dirty
    # Service object that filters an event payload to only include events that
    # reflect an entity state that is _more recent_ than previously received events.
    #
    # Can be used to ignore events received out-of-order (e.g. an `update` event
    # about an entity received after the `delete` event for that same entity),
    # given Routemaster makes no guarantee of in-order delivery of events.
#
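    # A minimal usage sketch (hypothetical values; assumes a Redis connection
    # and a payload shaped like the events handled by #run below):
    #
    #   filter = Routemaster::Dirty::Filter.new(redis: Redis.new)
    #   filter.run([
    #     { 'url' => 'https://example.com/widgets/1', 't' => 2 },
    #     { 'url' => 'https://example.com/widgets/1', 't' => 1 } # older, filtered out
    #   ])
    #   # => only the 't' => 2 event is returned
    #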
class Filter
EXPIRY = 86_400
# @param redis [Redis, Redis::Namespace] a connection to Redis, used to
# persists the known state
def initialize(redis:nil)
@redis = redis || Config.drain_redis
@expiry = Config.cache_expiry
end
      # Processes a payload and returns the part of this payload containing
      # only the latest event for each entity.
#
# Events are skipped if they are older than a previously processed
# event for the same entity.
#
# Order of kept events is not guaranteed to be preserved.
def run(payload)
events = {} # url -> event
payload.each do |event|
known_state = State.get(@redis, event['url'])
# skip events older than what we already know
next if known_state.t > event['t']
new_state = State.new(event['url'], event['t'])
next if new_state == known_state
new_state.save(@redis, @expiry)
events[event['url']] = event
end
events.values
end
end
end
end
|
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from open_pgp.djinni
#pragma once
#include <memory>
#include <string>
namespace ProtonMail {
class IGender {
public:
virtual ~IGender() {}
virtual std::string getGender() = 0;
virtual std::string getText() = 0;
static std::shared_ptr<IGender> create_instance(const std::string & sex, const std::string & text);
};
} // namespace ProtonMail
|
[Cookiecutter](https://github.com/audreyr/cookiecutter) template for a Python 3 CLI application.
```sh
cookiecutter https://github.com/thawk//cookiecutter-python-cli
```
|
package com.dongtronic.diabot.logic.`fun`
import com.dongtronic.diabot.exceptions.NoSuchEpisodeException
import com.rometools.rome.feed.synd.SyndEntry
import com.rometools.rome.io.FeedException
import com.rometools.rome.io.SyndFeedInput
import com.rometools.rome.io.XmlReader
import java.io.FileNotFoundException
import java.io.IOException
import java.net.URL
object Diacast {
val episodes: List<SyndEntry>
@Throws(FeedException::class, IOException::class)
get() {
val feedSource = URL("https://diacast.cascer1.space/podcast.rss")
val input = SyndFeedInput()
val feed = input.build(XmlReader(feedSource))
return feed.entries
}
@Throws(NoSuchEpisodeException::class, IOException::class, FeedException::class)
fun getEpisode(episode: Int): SyndEntry {
try {
if (episode == 0) {
return episodes[0]
}
for (entry in episodes) {
for (element in entry.foreignMarkup) {
if (element.name == "episode") {
val number = element.value
if (Integer.valueOf(number) == episode) {
return entry
}
}
}
}
} catch (e: FileNotFoundException) {
throw NoSuchEpisodeException()
}
throw NoSuchEpisodeException()
}
}
|
var animation = new Walkway({
selector: '#loader',
duration: '4000',
easing: function(t){
return t * t;
}
});
animation.draw();
var animation_1 = function(){
var t = new TimelineMax({
repeat: 0,
yoyo: false
});
var graph = document.getElementById('graph');
t.timeScale(2);
t.to([graph], 4, {
});
t.to([graph], 0.5, {
alpha: 1,
ease: Power1.easeInOut
});
};
animation_1();
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package testsupport
import com.github.tomakehurst.wiremock.client.WireMock
import com.github.tomakehurst.wiremock.verification.LoggedRequest
import org.scalatest.concurrent.{Eventually, IntegrationPatience, ScalaFutures}
import org.scalatest._
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.{JsValue, Json}
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext
trait RichMatchers
extends Matchers
with TryValues
with EitherValues
with OptionValues
with AppendedClues
with ScalaFutures
with StreamlinedXml
with Inside
with Eventually
with IntegrationPatience
with JsonSyntax {
implicit lazy val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global
implicit def toLoggedRequestOps(lr: LoggedRequest) = new {
def getBodyAsJson: JsValue = Json.parse(lr.getBodyAsString)
}
/**
   * Returns the request recorded by WireMock.
   * Asserts there was only one request made to WireMock.
* Use it in Connector unit tests.
*/
def getRecordedRequest(): LoggedRequest = {
val allRecordedRequests = WireMock.getAllServeEvents().asScala.map(_.getRequest)
    allRecordedRequests should have length 1 withClue "there is supposed to be only one recorded request"
allRecordedRequests.head
}
def assertThereWasOnlyOneReqeust() = getRecordedRequest()
}
|
// Package vboolean implements the boolean type.
package vboolean
import "hawx.me/code/vodka/types"
// A boolean value, either `true` or `false`.
type VBoolean struct {
value bool
}
// String returns the string value of the VBoolean.
func (v *VBoolean) String() string {
if v.value {
return "true"
}
return "false"
}
// Value returns the boolean value.
func (v *VBoolean) Value() interface{} {
return v.value
}
// Type returns the name of the type, "boolean".
func (v *VBoolean) Type() string {
return "boolean"
}
// Compare returns 0 if the values are equal, -2 otherwise.
func (v VBoolean) Compare(other types.VType) int {
if val, same := other.(*VBoolean); same {
if val.value == v.value {
return 0
}
}
return -2
}
func (v *VBoolean) Copy() types.VType {
return v
}
// New creates a new VBoolean with the value given.
func New(val bool) *VBoolean {
b := new(VBoolean)
b.value = val
return b
}
// True returns a new true VBoolean.
func True() *VBoolean {
return New(true)
}
// False returns a new false VBoolean.
func False() *VBoolean {
return New(false)
}
|
package de.flapdoodle.photosync.sync
import de.flapdoodle.photosync.progress.Progress
import de.flapdoodle.photosync.ui.sync.SyncGroup
import de.flapdoodle.photosync.ui.sync.SyncGroupID
import de.flapdoodle.photosync.ui.sync.SyncList
interface Synchronizer {
fun sync(
set: SyncList,
enableCopyBack: Boolean = false,
enableRemove: Boolean = false,
listener: (id: SyncGroupID, command: SyncCommand, status: SyncGroup.Status) -> Unit = { _,_,_ -> },
progressListener: (Progress) -> Unit = { _ -> }
)
}
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
session(['filtros' => '']);
class UsodeApi extends Controller
{
public function InicioApi(Request $request)
{
        //Array to store the data obtained as the API response.
        //Separate brands, models, types and years to load the filters.
$CarS = array();
$marcas = array();
$modelos = array();
$tipos = array();
$years = array();
        //Variables used as array index counters
$j=0;
$k=0;
$m=0;
$n=0;
        //Start of API consumption
$key = 'VLbzjFUqGE4MmBU';
$url = 'http://54.207.126.58:10000/getCars';
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_HTTPHEADER, array(
'api-key: '.$key,
'Content-Type: application/json',
));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, TRUE);
curl_setopt($ch, CURLOPT_HTTPAUTH, CURLAUTH_BASIC);
$output = curl_exec($ch);
if($output === false) {
$error = curl_error($ch);
            throw new \Exception($error, 1);
}
$info = curl_getinfo($ch);
curl_close($ch);
$response = json_decode($output);
        //End of API consumption
        //Start of data loading
for($i=0; $i<count($response); $i++)
{
$CarS[$i]['idCar'] = $response[$i]->idCar;
$CarS[$i]['brand'] = $response[$i]->brand;
$CarS[$i]['model'] = $response[$i]->model;
$CarS[$i]['year'] = $response[$i]->year;
$CarS[$i]['typeVehicle'] = $response[$i]->typeVehicle;
$CarS[$i]['img'] = $response[$i]->img;
            //Start of data loading for the filters
if (!in_array($response[$i]->brand, $marcas))
{
$marcas[$j] = $response[$i]->brand;
$j++;
}
if (!in_array($response[$i]->model, $modelos))
{
$modelos[$k] = $response[$i]->model;
$k++;
}
if (!in_array($response[$i]->typeVehicle, $tipos))
{
$tipos[$m] = $response[$i]->typeVehicle;
$m++;
}
if (!in_array($response[$i]->year, $years))
{
$years[$n] = $response[$i]->year;
$n++;
}
            //End of data loading for the filters
}
        //End of data loading
        //Start of storing the data in the session
session(['datos' => $CarS]);
session(['marcas' => $marcas]);
session(['modelos' => $modelos]);
session(['tipos' => $tipos]);
session(['years' => $years]);
        //End of storing the data in the session
        //Tell the framework which view to display.
return view('index');
}
public function Filtrar(Request $request)
{
        //Load the filters
$filtros = session('datos');
$CarS = array();
$j = 0;
foreach ($request->filtros as $subject)
{
for($i=0; $i<count($filtros); $i++)
{
                //Look for matches between the data array and the filters
if (in_array($subject, $filtros[$i]))
{
                    //If it matches, load a temporary array with the data corresponding to the filters
$CarS[$j] = $filtros[$i];
$j++;
}
}
}
        //Store the filtered results in the session
session(['filtros' => $CarS]);
}
}
|
package com.jhlabs.map.proj;
import java.awt.geom.Point2D;
import com.jhlabs.map.Ellipsoid;
public class CoordinateSystemToCoordinateSystem {
private final static double PI_OVER_2 = Math.PI / 2;
/**
* cosine of 67.5 degrees
*/
private final static double COS_67P5 = 0.38268343236508977;
/**
* Toms region 1 constant
*/
private final static double AD_C = 1.0026000;
private static final double genau = 1.E-12;
private static final double genau2 = (genau * genau);
private static final int maxiter = 30;
public static Point2D.Double transform(Projection fromProjection,
Projection toProjection, Point2D.Double from, Point2D.Double to) {
Point2D.Double intermediate = new Point2D.Double();
fromProjection.inverseTransformRadians(from, intermediate);
transformDatum(fromProjection, toProjection, intermediate, intermediate);
return toProjection.transformRadians(intermediate, to);
}
public static void transformDatum(Projection fromProjection,
Projection toProjection, Point2D.Double from, Point2D.Double to) {
double src_a = fromProjection.a;
double src_es = fromProjection.es;
double dst_a = toProjection.a;
double dst_es = toProjection.es;
if (src_a == dst_a && src_es == dst_es) {
to.x = from.x;
to.y = from.y;
return;
}
Point3D p3 = new Point3D();
ConvertGeodeticToGeocentric(src_a, src_es, from, p3);
GeocentricToWgs84(fromProjection.ellipsoid, p3);
GeocentricFromWgs84(toProjection.ellipsoid, p3);
ConvertGeocentricToGeodeticIterative(dst_a, dst_es, p3, to);
}
/**
* Converts geodetic coordinates (latitude, longitude) to geocentric
* coordinates (X, Y), according to the current ellipsoid parameters.
*
* @param a Semi-major axis of ellipsoid in meters
* @param es Eccentricity squared
* @param from - geodetic latitude and longitude in radians
* @param to - resulting geocentric x and y
*/
private static void ConvertGeodeticToGeocentric(double a, double es,
Point2D.Double from, Point3D to) {
double lat = from.y;
double lon = from.x;
double height = 0;
/**
* Don't blow up if Latitude is just a little out of the value* range as it
* may just be a rounding issue. Also removed longitude* test, it should be
* wrapped by cos() and sin(). NFW for PROJ.4, Sep/2001.
*/
if (lat < -PI_OVER_2 && lat > -1.001 * PI_OVER_2) {
lat = -PI_OVER_2;
} else if (lat > PI_OVER_2 && lat < 1.001 * PI_OVER_2) {
lat = PI_OVER_2;
} else if ((lat < -PI_OVER_2) || (lat > PI_OVER_2)) {
/*
* Latitude out of range
*/
throw new ProjectionException("latitude is out of range " + lat);
}
if (lon > Math.PI) {
lon -= (2 * Math.PI);
}
double sinLat = Math.sin(lat);
double cosLat = Math.cos(lat);
double sin2Lat = sinLat * sinLat;
// Earth radius at location
double radiusOfEarthAtLocation = a / (Math.sqrt(1.0e0 - es * sin2Lat));
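    // Standard geodetic-to-geocentric relations, with N the prime vertical
    // radius computed above and the height h fixed to 0 in this port:
    //   X = (N + h) * cos(lat) * cos(lon)
    //   Y = (N + h) * cos(lat) * sin(lon)
    //   Z = (N * (1 - e^2) + h) * sin(lat)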
to.x = (radiusOfEarthAtLocation + height) * cosLat * Math.cos(lon);
to.y = (radiusOfEarthAtLocation + height) * cosLat * Math.sin(lon);
to.z = ((radiusOfEarthAtLocation * (1 - es)) + height) * sinLat;
}
private static void GeocentricToWgs84(Ellipsoid ellipsoid, Point3D point) {
double[] params = ellipsoid.datumParams;
if (params == null) {
return;
}
if (params.length == 3) {
point.x += params[0];
point.y += params[1];
point.z += params[2];
} else if (params.length == 7) {
double x = point.x;
double y = point.y;
double z = point.z;
point.x = params[6] * (x - params[5] * y + params[4] * z) + params[0];
point.y = params[6] * (params[5] * x + y - params[3] * z) + params[1];
point.z = params[6] * (-params[4] * x + params[3] * y + z) + params[2];
}
}
private static void GeocentricFromWgs84(Ellipsoid ellipsoid, Point3D point) {
double[] params = ellipsoid.datumParams;
if (params == null) {
return;
}
if (params.length == 3) {
point.x -= params[0];
point.y -= params[1];
point.z -= params[2];
} else if (params.length == 7) {
double x = (point.x - params[0]) / params[6];
double y = (point.y - params[1]) / params[6];
double z = (point.z - params[2]) / params[6];
point.x = x + params[5] * y - params[4] * z;
point.y = -params[5] *x + y + params[3] * z;
point.z = params[4] * x - params[3] *y + z;
}
}
/**
* The method used here is derived from 'An Improved Algorithm for Geocentric
* to Geodetic Coordinate Conversion', by Ralph Toms, Feb 1996
*/
private static void ConvertGeocentricToGeodeticNonIterative(double a,
double es2, Point3D from, Point2D.Double to) {
double b = (es2 == 0.0) ? a : a * Math.sqrt(1 - Math.sqrt(es2));
// double S1;
// double Sin_B0;
double Sin3_B0; /* cube of sin(B0) */
double Cos_B0; /* cos(B0) */
double Sin_p1; /* sin(phi1), phi1 is estimated latitude */
double Cos_p1; /* cos(phi1) */
double Rn; /* Earth radius at location */
double Sum; /* numerator of cos(phi1) */
double X = from.x;
double Y = from.y;
double Z = from.z;
/* indicates location is in polar region */
boolean At_Pole = false;
if (X != 0.0) {
      to.x = Math.atan2(Y, X);
} else {
if (Y > 0) {
to.x = PI_OVER_2;
} else if (Y < 0) {
to.x = -PI_OVER_2;
} else {
At_Pole = true;
to.x = 0.0;
if (Z > 0.0) { /* north pole */
to.y = PI_OVER_2;
} else if (Z < 0.0) { /* south pole */
to.y = -PI_OVER_2;
} else { /* center of earth */
to.y = PI_OVER_2;
// height = -Geocent_b;
return;
}
}
}
double squareOfDistanceFromZAxis = X * X + Y * Y;
double distanceFromZAxis = Math.sqrt(squareOfDistanceFromZAxis);
double initialEstimateOfVerticalComponent = Z * AD_C;
double initialEstimateOfHorizontalComponent = Math.sqrt(initialEstimateOfVerticalComponent
* initialEstimateOfVerticalComponent + squareOfDistanceFromZAxis);
/* sin(B0), B0 is estimate of Bowring aux variable */
double sin_B0 = initialEstimateOfVerticalComponent
/ initialEstimateOfHorizontalComponent;
Cos_B0 = distanceFromZAxis / initialEstimateOfHorizontalComponent;
Sin3_B0 = sin_B0 * sin_B0 * sin_B0;
double correctedEstimateOfVerticalComponent = Z + b * es2 * Sin3_B0;
Sum = distanceFromZAxis - a * es2 * Cos_B0 * Cos_B0 * Cos_B0;
/* corrected estimate of horizontal component */
double correctedEstimateOfHorizontalComponent = Math.sqrt(correctedEstimateOfVerticalComponent
* correctedEstimateOfVerticalComponent + Sum * Sum);
Sin_p1 = correctedEstimateOfVerticalComponent
/ correctedEstimateOfHorizontalComponent;
Cos_p1 = Sum / correctedEstimateOfHorizontalComponent;
Rn = a / Math.sqrt(1.0 - es2 * Sin_p1 * Sin_p1);
if (Cos_p1 >= COS_67P5) {
// height = W / Cos_p1 - Rn;
} else if (Cos_p1 <= -COS_67P5) {
// height = W / -Cos_p1 - Rn;
} else {
// height = Z / Sin_p1 + Rn * (es2 - 1.0);
}
if (At_Pole == false) {
to.y = Math.atan(Sin_p1 / Cos_p1);
}
}
private static void ConvertGeocentricToGeodeticIterative(double a, double es,
Point3D from, Point2D.Double to) {
double P; /* distance between semi-minor axis and location */
double RR; /* distance between center and location */
double CT; /* sin of geocentric latitude */
double ST; /* cos of geocentric latitude */
double RX;
double RK;
double RN; /* Earth radius at location */
double CPHI0; /* cos of start or old geodetic latitude in iterations */
double SPHI0; /* sin of start or old geodetic latitude in iterations */
double CPHI; /* cos of searched geodetic latitude */
double SPHI; /* sin of searched geodetic latitude */
double SDPHI; /*
* end-criterium: addition-theorem of
* sin(Latitude(iter)-Latitude(iter-1))
*/
boolean At_Pole; /* indicates location is in polar region */
    int iter; /* # of continuous iterations, max. 30 is always enough (s.a.) */
double X = from.x;
double Y = from.y;
double Z = from.z;
    double b = (es == 0.0) ? a : a * Math.sqrt(1 - es); /* semi-minor axis */
double height = 0;
At_Pole = false;
P = Math.sqrt(X * X + Y * Y);
RR = Math.sqrt(X * X + Y * Y + Z * Z);
/* special cases for latitude and longitude */
if (P / a < genau) {
/* special case, if P=0. (X=0., Y=0.) */
At_Pole = true;
to.x = 0.;
/*
* if (X,Y,Z)=(0.,0.,0.) then Height becomes semi-minor axis of ellipsoid
* (=center of mass), Latitude becomes PI/2
*/
if (RR / a < genau) {
to.y = PI_OVER_2;
height = b;
return;
}
} else {
/*
* ellipsoidal (geodetic) longitude interval: -PI < Longitude <= +PI
*/
to.x = Math.atan2(Y, X);
}
/*
* -------------------------------------------------------------- Following
     * iterative algorithm was developed by "Institut für Erdmessung",
* University of Hannover, July 1988. Internet: www.ife.uni-hannover.de
* Iterative computation of CPHI,SPHI and Height. Iteration of CPHI and SPHI
* to 10**-12 radian resp. 2*10**-7 arcsec.
* --------------------------------------------------------------
*/
CT = Z / RR;
ST = P / RR;
RX = 1.0 / Math.sqrt(1.0 - es * (2.0 - es) * ST * ST);
CPHI0 = ST * (1.0 - es) * RX;
SPHI0 = CT * RX;
iter = 0;
/*
* loop to find sin(Latitude) resp. Latitude until
* |sin(Latitude(iter)-Latitude(iter-1))| < genau
*/
do {
iter++;
RN = a / Math.sqrt(1.0 - es * SPHI0 * SPHI0);
/* ellipsoidal (geodetic) height */
height = P * CPHI0 + Z * SPHI0 - RN * (1.0 - es * SPHI0 * SPHI0);
RK = es * RN / (RN + height);
RX = 1.0 / Math.sqrt(1.0 - RK * (2.0 - RK) * ST * ST);
CPHI = ST * (1.0 - RK) * RX;
SPHI = CT * RX;
SDPHI = SPHI * CPHI0 - CPHI * SPHI0;
CPHI0 = CPHI;
SPHI0 = SPHI;
} while (SDPHI * SDPHI > genau2 && iter < maxiter);
/* ellipsoidal (geodetic) latitude */
to.y = Math.atan(SPHI / Math.abs(CPHI));
return;
}
private static class Point3D {
public double x;
public double y;
public double z;
}
}
|
import React from 'react'
import PropTypes from 'prop-types'
export class AddTodo extends React.Component {
state = {
title: ''
}
onTitleChange = (e) => this.setState({ [e.target.name]: e.target.value })
onSubmit = (e) => {
e.preventDefault();
if (this.state.title !== '') {
this.props.addTodoItem(this.state.title)
this.setState({ title: '' })
}
}
render() {
return (
<form onSubmit={this.onSubmit} style={{ display: 'flex', margin: '10px 0px' }}>
<input
type="text"
name="title"
style={{flex: '10', padding: '5px'}}
placeholder="Add Todo..."
value={this.state.title}
onChange={this.onTitleChange}
/>
<input
type="submit"
value="Submit"
className="btn"
style={{flex: '1'}}
onClick={this.props.onSubmitClick}
/>
</form>
)
}
}
// Prop Types
AddTodo.propTypes = {
addTodoItem: PropTypes.func.isRequired
}
export default AddTodo
|
class CustomView < UIView
# not very custom, is it!?
attr_accessor :custom_attr
end
|
@extends('layouts.base')
@section('content')
{!! Alert::render() !!}
<div class="row">
<div class="col-md-8">
<div class="portlet box green">
<div class="portlet-title">
<div class="caption">
<i class="fa fa-table"></i>
Usuarios
</div>
</div>
<div class="portlet-body">
{!!Form::botonmodal('Nuevo Usuario','#myModalNewUser','blue','fa fa-plus','margin-bottom-20')!!}
<table class="table table-striped table-hover midt">
<thead>
<tr>
<th>Nombre</th>
                            <th>Email</th>
                            <th>Foto</th>
<th>Opciones</th>
</tr>
</thead>
<tbody>
@foreach($Lista as $item)
<tr>
<td>{{$item->name}}</td>
<td>{{$item->email}}</td>
<td><img src="{{$item->mostrar_foto}}" width='25px'></td>
<td>
<a href="{{ route('admin.users.edit',$item->id) }}" title="Editar"class="btn btn-icon-only green-haze" >
<i class="fa fa-edit"></i>
</a>
<a href="{{ route('admin.users.show',$item->id) }}" title="Eliminar" class="btn -btn-icon-only red">
<i class="fa fa-trash"></i>
</a>
</td>
</tr>
@endforeach
</tbody>
</table>
</div>
</div>
</div>
</div>
@include('admin.users.modals.create')
@stop
@section('user-img')
{{ Auth::user()->mostrar_foto }}
@stop
@section('menu-user')
@include('menu.profile-admin')
@stop
@section('sidebar')
@include(Auth::user()->menu)
@stop
@section('user-name')
{!!Auth::user()->name!!}
@stop
@section('page-title')
@stop
@section('page-subtitle')
@stop
|
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Modified by: Gorka Lerchundi Osa
package util
import (
"fmt"
"net/http"
"os"
"github.com/coreos/etcd/client"
"github.com/coreos/etcd/pkg/transport"
"golang.org/x/net/context"
)
func getEtcdTransport() (*http.Transport, error) {
return transport.NewTransport(
transport.TLSInfo{
CAFile: os.Getenv("ETCDCTL_CA_FILE"),
CertFile: os.Getenv("ETCDCTL_CERT_FILE"),
KeyFile: os.Getenv("ETCDCTL_KEY_FILE"),
},
)
}
func newEtcdClient(url string) (client.Client, error) {
tr, err := getEtcdTransport()
if err != nil {
return nil, err
}
cfg := client.Config{
Transport: tr,
Endpoints: []string{url},
}
hc, err := client.New(cfg)
if err != nil {
return nil, err
}
return hc, nil
}
func newEtcdMembersAPI(url string) (client.MembersAPI, error) {
hc, err := newEtcdClient(url)
if err != nil {
return nil, err
}
return client.NewMembersAPI(hc), nil
}
func EtcdPeerURLFromIP(ip string) string {
return fmt.Sprintf("http://%s:2380", ip)
}
func EtcdClientURLFromIP(ip string) string {
return fmt.Sprintf("http://%s:2379", ip)
}
func EtcdListMembers(url string) (members []client.Member, err error) {
mAPI, err := newEtcdMembersAPI(url)
if err != nil {
return nil, err
}
ctx, cancel := context.WithTimeout(context.Background(), client.DefaultRequestTimeout)
members, err = mAPI.List(ctx)
cancel()
return
}
func EtcdAddMember(url string, peerURL string) (member *client.Member, err error) {
mAPI, err := newEtcdMembersAPI(url)
if err != nil {
return nil, err
}
	// Actually attempt to add the member.
ctx, cancel := context.WithTimeout(context.Background(), client.DefaultRequestTimeout)
member, err = mAPI.Add(ctx, peerURL)
cancel()
return
}
func EtcdRemoveMember(url, removalID string) (err error) {
mAPI, err := newEtcdMembersAPI(url)
if err != nil {
return err
}
// Actually attempt to remove the member.
ctx, cancel := context.WithTimeout(context.Background(), client.DefaultRequestTimeout)
err = mAPI.Remove(ctx, removalID)
cancel()
return
}
|
package com.example.angluswang.superflashlight.view;
import android.app.Dialog;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Shader;
import android.graphics.SweepGradient;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.view.MotionEvent;
import android.view.View;
import android.view.WindowManager;
/**
* Created by Jeson on 2016/7/1.
 * Color picker dialog
*/
public class ColorPickerDialog extends Dialog {
    private final int COLOR_DIALOG_WIDTH = 300; // width of the dialog
    private final int COLOR_DIALOG_HEIGHT = 300; // height of the dialog
    private final int CENTER_X = COLOR_DIALOG_WIDTH / 2; // x coordinate of the dialog's center circle
    private final int CENTER_Y = COLOR_DIALOG_HEIGHT / 2; // y coordinate of the dialog's center circle
    private final int CENTER_RADIUS = 32; // radius of the center circle
public interface OnColorChangedListener {
void colorChanged(int color);
}
private OnColorChangedListener mListener;
private int mInitialColor;
public ColorPickerDialog(Context context, OnColorChangedListener listener,
int initialColor) {
super(context);
mListener = listener;
mInitialColor = initialColor;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
OnColorChangedListener listener = new OnColorChangedListener() {
@Override
public void colorChanged(int color) {
mListener.colorChanged(color);
                dismiss(); // close the dialog
}
};
setContentView(new ColorPickerView(getContext(), listener, mInitialColor));
ColorDrawable colorDrawable = new ColorDrawable();
colorDrawable.setColor(Color.BLACK);
getWindow().setBackgroundDrawable(colorDrawable);
getWindow().setAttributes(new WindowManager.LayoutParams(
COLOR_DIALOG_WIDTH, COLOR_DIALOG_HEIGHT, 0, 0, 0
));
}
private class ColorPickerView extends View {
private Paint mPaint, mCenterPaint;
private final int[] mColors;
private OnColorChangedListener mListener;
        // whether the touch hit the center circle
private boolean mTrackingCenter, mHightlightCenter;
private static final float PI = 3.1415926f;
public ColorPickerView(Context context, OnColorChangedListener listener,
int color) {
super(context);
mColors = new int[]{0xFFFF0000, 0xFFFF00FF, 0xFF0000FF,
0xFF00FFFF, 0xFF00FF00, 0xFFFFFF00, 0xFFFF0000};
Shader shader = new SweepGradient(0, 0, mColors, null);
mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mPaint.setShader(shader);
mPaint.setStyle(Paint.Style.STROKE);
mPaint.setStrokeWidth(32);
mCenterPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mCenterPaint.setColor(color);
mCenterPaint.setStrokeWidth(5);
}
@Override
protected void onDraw(Canvas canvas) {
            // compute the radius of the color ring
float r = CENTER_X - mPaint.getStrokeWidth() * 0.5f - 20;
            // move the drawing origin to the center
canvas.translate(CENTER_X, CENTER_Y);
canvas.drawCircle(0, 0, r, mPaint);
canvas.drawCircle(0, 0, CENTER_RADIUS, mCenterPaint);
if (mTrackingCenter) {
int c = mCenterPaint.getColor();
mCenterPaint.setStyle(Paint.Style.STROKE);
if (mHightlightCenter) {
mCenterPaint.setAlpha(0xff);
}else {
mCenterPaint.setAlpha(0x00);
}
canvas.drawCircle(0, 0, CENTER_RADIUS + mCenterPaint.getStrokeWidth(), mCenterPaint);
mCenterPaint.setStyle(Paint.Style.FILL);
mCenterPaint.setColor(c);
}
}
/**
         * Linearly interpolates between two color component values.
*/
private int average(int s, int d, float p) {
return s + Math.round(p * (d - s));
}
private int interpColor(int colors[], float unit) {
if (unit <= 0) {
return colors[0];
}
if (unit >= 1) {
return colors[colors.length - 1];
}
float p = unit * (colors.length - 1);
int i = (int) p;
p -= i;
int c0 = colors[i];
int c1 = colors[i + 1];
int alpha = average(Color.alpha(c0), Color.alpha(c1), p);
int red = average(Color.red(c0), Color.red(c1), p);
int green = average(Color.green(c0), Color.green(c1), p);
            int blue = average(Color.blue(c0), Color.blue(c1), p);
return Color.argb(alpha, red, green, blue);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
float x = event.getX() - CENTER_X;
float y = event.getY() - CENTER_Y;
            boolean inCenter = Math.sqrt(x * x + y * y) <= CENTER_RADIUS; // whether the touch is inside the center circle
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
mTrackingCenter = inCenter;
if (inCenter) {
mHightlightCenter = true;
invalidate();
break;
}
case MotionEvent.ACTION_MOVE:
                    // angle in radians from atan2, in the range -π to π
float angle = (float) Math.atan2(x, y);
                    // map the angle into the range 0..1
float unit = angle / (2 * PI);
if (unit < 0) {
unit += 1;
}
mCenterPaint.setColor(interpColor(mColors, unit));
invalidate();
break;
case MotionEvent.ACTION_UP:
if (mTrackingCenter) {
if (inCenter) {
mListener.colorChanged(mCenterPaint.getColor());
}
mTrackingCenter = false;
invalidate();
}
break;
default:
break;
}
return true;
}
}
}
|
namespace Outracks.IO
{
public static class RenamePath
{
public static IAbsolutePath Rename(this IAbsolutePath path, string newName)
{
return path.MatchWith(
(AbsoluteFilePath file) => (IAbsolutePath)file.Rename(new FileName(newName)),
(AbsoluteDirectoryPath dir) => (IAbsolutePath)dir.Rename(new DirectoryName(newName)));
}
public static IRelativePath Rename(this IRelativePath path, string newName)
{
return path.MatchWith(
(RelativeFilePath file) => (IRelativePath)file.Rename(new FileName(newName)),
(RelativeDirectoryPath dir) => (IRelativePath)dir.Rename(new DirectoryName(newName)));
}
public static RelativeFilePath Rename(this RelativeFilePath path, FileName newName)
{
return new RelativeFilePath(newName, path.BasePath);
}
public static AbsoluteFilePath Rename(this AbsoluteFilePath path, FileName newName)
{
return new AbsoluteFilePath(newName, path.ContainingDirectory);
}
public static RelativeDirectoryPath Rename(this RelativeDirectoryPath path, DirectoryName newName)
{
return new RelativeDirectoryPath(newName, path.BasePath);
}
public static AbsoluteDirectoryPath Rename(this AbsoluteDirectoryPath path, DirectoryName newName)
{
return new AbsoluteDirectoryPath(newName, path.ContainingDirectory);
}
}
}
|
module Revok
class Config
MODULES_DIR = File.expand_path("../modules", File.dirname(__FILE__))
MSG_QUEUE_USER = ENV["MSG_QUEUE_USER"] != nil ? ENV["MSG_QUEUE_USER"] : "user"
MSG_QUEUE_PASSWORD = ENV["MSG_QUEUE_PASSWORD"] != nil ? ENV["MSG_QUEUE_PASSWORD"] : "p@ssword"
MSG_QUEUE_HOST = ENV["MSG_QUEUE_HOST"] != nil ? ENV["MSG_QUEUE_HOST"] : "127.0.0.1"
MSG_QUEUE_PORT = ENV["MSG_QUEUE_PORT"] != nil ? ENV["MSG_QUEUE_PORT"] : "61612"
MSG_QUEUE_CERT_PATH = ENV["MSG_QUEUE_CERT_PATH"] != nil ? ENV["MSG_QUEUE_CERT_PATH"] : ""
WORK_QUEUE = "/queue/work"
RETURN_QUEUE = "/queue/return"
USE_SMTP = ENV["USE_SMTP"] != nil ? ENV["USE_SMTP"] : "off"
SMTP_ADDRESS = ENV["SMTP_ADDRESS"] != nil ? ENV["SMTP_ADDRESS"] : "smtp.example.com"
SMTP_PORT = ENV["SMTP_PORT"] != nil ? ENV["SMTP_PORT"] : "587"
SMTP_USER = ENV["SMTP_USER"] != nil ? ENV["SMTP_USER"] : "username"
SMTP_PASSWORD = ENV["SMTP_PASSWORD"] != nil ? ENV["SMTP_PASSWORD"] : "password"
EMAIL_ADDRESS = ENV["EMAIL_ADDRESS"] != nil ? ENV["EMAIL_ADDRESS"] : "revok@example.com"
DB_TYPE = ENV['DB_TYPE'] != nil ? ENV['DB_TYPE'] : "sqlite"
DB_NAME = ENV['DB_NAME'] != nil ? ENV['DB_NAME'] : "revok_db"
DB_FILE = ENV['DB_FILE'] != nil ? ENV['DB_FILE'] : File.expand_path("./db/revok.db", File.dirname(__FILE__))
DB_USER = ENV['DB_USER'] != nil ? ENV['DB_USER'] : "revok"
DB_PASSWORD = ENV['DB_PASSWORD'] != nil ? ENV['DB_PASSWORD'] : "password"
DB_HOST = ENV['DB_HOST'] != nil ? ENV['DB_HOST'] : "localhost"
DB_PORT = ENV['DB_PORT'] != nil ? ENV['DB_PORT'].to_i : 5432
DB_SSL = ENV['DB_SSL'] != nil ? ENV['DB_SSL'] : "disable"
end
end
|
package com.zaita.aliyounes.zaitafc.activities
import android.os.Bundle
import android.support.design.widget.BottomNavigationView
import android.support.v4.app.Fragment
import android.support.v7.app.AppCompatActivity
import android.view.MenuItem
import com.zaita.aliyounes.zaitafc.R
import com.zaita.aliyounes.zaitafc.fragments.MembersFragment
import com.zaita.aliyounes.zaitafc.fragments.NewsFragment
import com.zaita.aliyounes.zaitafc.fragments.ProfileFragment
class MainActivity : AppCompatActivity() {
    private val mOnNavigationItemSelectedListener = BottomNavigationView.OnNavigationItemSelectedListener { item ->
        when (item.itemId) {
            R.id.navigation_news -> {
                showNewsFragment()
                true
            }
            R.id.navigation_members -> {
                showMembersFragment()
                true
            }
            R.id.navigation_profile -> {
                showProfileFragment()
                true
            }
            else -> false
        }
    }
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val navigation = findViewById<BottomNavigationView>(R.id.navigation)
navigation.setOnNavigationItemSelectedListener(mOnNavigationItemSelectedListener)
showNewsFragment()
}
private fun showNewsFragment() {
var fragment: Fragment? = supportFragmentManager.findFragmentByTag(NewsFragment.TAG)
if (fragment == null)
fragment = NewsFragment()
supportFragmentManager
.beginTransaction()
.addToBackStack(NewsFragment.TAG)
.replace(R.id.fragment_container,
fragment,
NewsFragment.TAG)
.commit()
}
private fun showMembersFragment() {
var fragment: Fragment? = supportFragmentManager.findFragmentByTag(MembersFragment.TAG)
if (fragment == null)
fragment = MembersFragment()
supportFragmentManager
.beginTransaction()
.addToBackStack(MembersFragment.TAG)
.replace(R.id.fragment_container,
fragment,
MembersFragment.TAG)
.commit()
}
private fun showProfileFragment() {
var fragment: Fragment? = supportFragmentManager.findFragmentByTag(ProfileFragment.TAG)
if (fragment == null)
fragment = ProfileFragment()
supportFragmentManager
.beginTransaction()
.addToBackStack(ProfileFragment.TAG)
.replace(R.id.fragment_container,
fragment,
ProfileFragment.TAG)
.commit()
}
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace Microsoft.IIS.Administration.WebServer.HttpRequestTracing
{
using System;
using Web.Administration;
public sealed class TraceProviderDefinition : ConfigurationElement {
private const string AreasAttribute = "areas";
private const string NameAttribute = "name";
private const string GuidAttribute = "guid";
private TraceAreaDefinitionsCollection _areasCollection;
public TraceAreaDefinitionsCollection Areas {
get {
if (_areasCollection == null) {
_areasCollection = (TraceAreaDefinitionsCollection)GetCollection(AreasAttribute, typeof(TraceAreaDefinitionsCollection));
}
return _areasCollection;
}
}
public string Name {
get {
return (string)base[NameAttribute];
}
set {
base[NameAttribute] = value;
}
}
public Guid Guid
{
get
{
return Guid.Parse((string)base[GuidAttribute]);
}
set
{
// Bracket format {00000000-0000-0000-0000-000000000000}
base[GuidAttribute] = value.ToString("B");
}
}
}
}
|
package io.github.Cnly.WowSuchCleaner.WowSuchCleaner.commands;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import io.github.Cnly.Crafter.Crafter.framework.commands.AbstractCrafterCommand;
import io.github.Cnly.Crafter.Crafter.framework.locales.ILocaleManager;
import io.github.Cnly.WowSuchCleaner.WowSuchCleaner.Main;
import io.github.Cnly.WowSuchCleaner.WowSuchCleaner.gui.LotShowcase;
public class ShowcaseCommand extends AbstractCrafterCommand
{
private Main main = Main.getInstance();
private ILocaleManager localeManager = main.getLocaleManager();
public ShowcaseCommand()
{
this.setAction("showcase");
this.setPlayerNeeded(true);
this.setPlayerNeededNotice(localeManager.getLocalizedString("commands.playerNeeded"));
this.setHelp(localeManager.getLocalizedString("commands.showcase.help").replace("{usage}", "/wsc showcase"));
this.setPermission("WowSuchCleaner.commands.showcase");
this.setPermissionNeededNotice(localeManager.getLocalizedString("commands.noPermission"));
}
@Override
protected void executeCommand(CommandSender sender, String[] args)
{
new LotShowcase(localeManager).openFor((Player)sender, 1);
}
}
|
import * as React from 'react'
import FormInput from '../../FormInput'
import FormSubmit from '../../FormSubmit'
import { DispatchLoginContext, LoginContext } from '../../LoginProvider'
import unlockActions from './actions'
const DeviseUnlocksNew: React.FC = () => {
const dispatchLogin = React.useContext(DispatchLoginContext)
const { unlocking, errors } = React.useContext(LoginContext)
const initState = {
email: '',
}
const reducer = (state, update) => ({ ...state, ...update })
const [state, dispatch] = React.useReducer(reducer, initState)
function handleChange(event) {
const { name, value } = event.target
dispatch({
[name]: value,
})
}
function handleSubmit() {
unlockActions.resendUnlock({ dispatch: dispatchLogin, user: state })
}
const submitDisabled = (): boolean => {
return Object.values(state).some((value: string) => value.length === 0)
}
return (
<div>
<h2>Resend unlock instructions</h2>
<form>
<FormInput label="Email" name="email" value={state.email} onChange={handleChange} showValid={unlocking} />
<FormSubmit
disabled={submitDisabled}
links={[{ to: '/users/sign_up', text: 'Cancel' }]}
onSubmit={handleSubmit}
showSpinner={unlocking}
text="Resend unlock instructions"
/>
</form>
</div>
)
}
export default DeviseUnlocksNew
|
#pragma warning disable 1591
namespace StarCitizen_API_Wrapper.Models.Organization
{
/// <summary>
/// The different archetypes of organizations.
/// </summary>
public enum OrganizationArchetypes
{
Corporation,
PMC,
Faith,
Syndicate,
Organization,
Undefined
}
}
|
#!/bin/bash
#
# Anime Lineup Server startup script
#
# chkconfig: 345 80 20
# description: Anime Lineup Server
# processname: anime_lineup_server
# Source function library.
. /etc/rc.d/init.d/functions
lock=/var/lock/subsys/anime_lineup
start() {
su - web -c 'cd ~/anime_lineup; unicorn_rails -c config/unicorn.rb -E production -D'
}
stop() {
su - web -c 'kill -INT $(cat ~/anime_lineup/tmp/pids/unicorn.pid)'
}
restart() {
stop
start
}
case "$1" in
start)
start
touch ${lock}
;;
stop)
stop
rm -rf ${lock}
;;
restart)
restart
touch ${lock}
;;
*)
echo $"Usage: $0 {start|stop|restart}"
exit 2
esac
|
package redis_test
import (
"testing"
genPlayer "sr/gen/player"
redisUtil "sr/redis"
"sr/test"
)
type Basic struct {
Name string
}
type WithRedisFlag struct {
FieldOne string `notredis:"field1"`
FieldTwo int `redis:"field3"`
}
type AvailableKeys struct {
Int8 int8
Int16 int16
Int32 int32
Int64 int64
Uint8 uint8
Uint16 uint16
Uint32 uint32
Uint64 uint64
Bool bool
String string
Bytes []byte
}
type AvailableKeysRedis struct {
Int8 int8 `redis:"i8"`
Int16 int16 `redis:"i16"`
Int32 int32 `redis:"i32"`
Int64 int64 `redis:"i64"`
Uint8 uint8 `redis:"u8"`
Uint16 uint16 `redis:"u16"`
Uint32 uint32 `redis:"u32"`
Uint64 uint64 `redis:"u64"`
Bool bool `redis:"bool"`
String string `redis:"str"`
Bytes []byte `redis:"bytes"`
}
type TooComplex struct {
basic Basic
}
func TestRequiredTypes(t *testing.T) {
test.RunParallel(t, "generates a player", func(t *testing.T) {
plr := genPlayer.Player(test.RNG())
_, err := redisUtil.StructToStringMap(&plr)
test.AssertSuccess(t, err, "mapping player")
})
}
func TestInvalidTypes(t *testing.T) {
test.RunParallel(t, "does not allow builtins", func(t *testing.T) {
cases := []struct {
input interface{}
name string
}{
{input: int64(2), name: "int64"},
{input: int32(3), name: "int32"},
{input: int16(4), name: "int16"},
{input: int8(2), name: "int8"},
{input: 2, name: "int"},
{input: false, name: "bool"},
{input: float64(0), name: "float64"},
{input: float32(1), name: "float32"},
}
for _, c := range cases {
_, err := redisUtil.StructToStringMap(&c.input)
test.AssertError(t, err, c.name)
}
})
test.RunParallel(t, "does not allow nil", func(t *testing.T) {
_, err := redisUtil.StructToStringMap(nil)
test.AssertError(t, err, "nil")
})
test.RunParallel(t, "does not allow collections", func(t *testing.T) {
cases := []struct {
input interface{}
name string
}{
{input: []int64{}, name: "[]int64"},
			{input: []bool{true}, name: "bool slice"},
{input: map[string]string{"foo": "bar"}, name: "string map"},
}
for _, c := range cases {
_, err := redisUtil.StructToStringMap(&c.input)
test.AssertError(t, err, c.name)
}
})
test.RunParallel(t, "does not allow non-pointed struct", func(t *testing.T) {
input := Basic{Name: "hello"}
_, err := redisUtil.StructToStringMap(input)
test.AssertError(t, err, "non-pointer error")
})
test.RunParallel(t, "allows struct values", func(t *testing.T) {
input := Basic{Name: "hello"}
expected := map[string]string{
"Name": "hello",
}
result, err := redisUtil.StructToStringMap(&input)
test.AssertSuccess(t, err, "map created")
test.AssertEqual(t, expected, result)
})
test.RunParallel(t, "follows just redis keys", func(t *testing.T) {
input := WithRedisFlag{FieldOne: "hello", FieldTwo: 2}
expected := map[string]string{
"FieldOne": "hello",
"field3": "2",
}
result, err := redisUtil.StructToStringMap(&input)
test.AssertSuccess(t, err, "map created")
test.AssertEqual(t, expected, result)
})
test.RunParallel(t, "adds all element types", func(t *testing.T) {
input := AvailableKeys{String: "foo", Bytes: []byte("bar")} // Just leave everything empty
expect := map[string]string{
"Int8": "0", "Int16": "0", "Int32": "0", "Int64": "0",
"Uint8": "0", "Uint16": "0", "Uint32": "0", "Uint64": "0",
"Bool": "false", "String": "foo", "Bytes": "bar",
}
result, err := redisUtil.StructToStringMap(&input)
test.AssertSuccess(t, err, "map created")
test.AssertEqual(t, expect, result)
})
test.RunParallel(t, "adds all named element types", func(t *testing.T) {
input := AvailableKeysRedis{String: "foo", Bytes: []byte("bar")} // Just leave everything empty
expect := map[string]string{
"i8": "0", "i16": "0", "i32": "0", "i64": "0",
"u8": "0", "u16": "0", "u32": "0", "u64": "0",
"bool": "false", "str": "foo", "bytes": "bar",
}
result, err := redisUtil.StructToStringMap(&input)
test.AssertSuccess(t, err, "map created")
test.AssertEqual(t, expect, result)
})
test.RunParallel(t, "does not accept nested struct", func(t *testing.T) {
type Advanced struct {
Basic Basic
}
input := Advanced{Basic: Basic{Name: "hello"}}
_, err := redisUtil.StructToStringMap(&input)
test.AssertError(t, err, "invalid field")
})
test.RunParallel(t, "errors for unexposed fields", func(t *testing.T) {
type private struct {
Exposed bool
private int
}
input := private{}
_, err := redisUtil.StructToStringMap(&input)
test.AssertError(t, err, "map created")
})
}
|
/***************************************************************************//**
* @file drv_spi.h
* @brief Arduino RT-Thread library SPI device driver header
* @author onelife <onelife.real[at]gmail.com>
******************************************************************************/
#ifndef __DRV_SPI_H__
#define __DRV_SPI_H__
/* Includes ------------------------------------------------------------------*/
#include <SPI.h> /* Arduino library */
#include "drv_common.h"
/* Exported defines ----------------------------------------------------------*/
#define SPI_DEFAULT_SPEED (250000)
#ifdef ARDUINO_ARCH_SAM
# define SPI_MAX_SPEED (24000000)
#else
# define SPI_MAX_SPEED (12000000)
#endif
#define SPI_DEFAULT_RETRY (10)
#define SPI_DEFAULT_LIMIT (512)
#define SPI_FLAG_MORE (rt_uint32_t)(0x01 << 16)
#define SPI_FLAG_READ_TOKEN(tk) (rt_uint32_t)((tk & 0xff) << 8)
#define SPI_FLAG_IDLE_TOKEN(tk) (rt_uint32_t)((tk & 0xff) << 0)
/* Exported types ------------------------------------------------------------*/
enum bsp_spi_channel {
#if CONFIG_USING_SPI0
SPI_CH0 = 0,
#endif
#if CONFIG_USING_SPI1
SPI_CH1 = 1,
#endif
SPI_CH_NUM = CONFIG_USING_SPI0 + CONFIG_USING_SPI1,
};
struct bsp_spi_contex {
rt_uint8_t chn; /* channel number */
rt_bool_t start;
rt_uint32_t spd; /* speed */
SPISettings set; /* setting */
void *ldev; /* lower level device (Arduino SPI) */
struct rt_mutex lok; /* lock */
struct rt_device dev; /* RT device */
};
/* Exported constants --------------------------------------------------------*/
/* Exported functions ------------------------------------------------------- */
rt_err_t bsp_hw_spi_init(void);
#endif /* __DRV_SPI_H__ */
|
module SearchableExchange
extend ActiveSupport::Concern
included do
searchable do
text :title
string :type
integer :poster_id
integer :last_poster_id
integer :category_id
boolean :trusted
boolean :closed
boolean :sticky
time :created_at
time :updated_at
time :last_post_at
end
end
module ClassMethods
def search_results(query, options={})
search = Discussion.search do
fulltext query
with :trusted, false unless (options[:user] && options[:user].trusted?)
order_by :last_post_at, :desc
paginate :page => options[:page], :per_page => Exchange.per_page
end
search.results
end
end
end
|
from .ast import BlankNode
class BlankNodeFactory:
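    """Creates BlankNode instances with sequential ids.

    The counter starts at ``initial_counter``; each call constructs a BlankNode
    with the next id and a reference back to this factory.
    """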
def __init__(self, initial_counter: int = 0):
self.counter = initial_counter
def __call__(self):
node = BlankNode(self.counter, self)
self.counter += 1
return node
|
## Linked List
### Usage
```php
use function Phantasy\DataTypes\LinkedList\{Cons, Nil};
use Phantasy\DataTypes\LinkedList\{LinkedList, Cons, Nil};
```
### Description
A purely functional linked list implementation.
### Methods
#### static fromArray (array $arr) : LinkedList
Used to convert a PHP array into a LinkedList: a structure whose only
elements are the head of the list and the tail, which is itself another
LinkedList.
```php
LinkedList::fromArray([1, 2, 3]);
// Cons(1, Cons(2, Cons(3, Nil)))
```
#### static of ($x) : LinkedList
Simply creates a `Cons($x, Nil())`, a LinkedList whose only element is `$x`.
```php
LinkedList::of(1);
// Cons(1, Nil)
```
#### static empty () : LinkedList
Creates the empty element for a LinkedList, simply `Nil()`.
```php
LinkedList::empty();
// Nil()
```
#### equals (LinkedList $l) : bool
Used to compare two `LinkedList`'s for equality.
Two `LinkedList`'s are equal if they are of the same type (Cons or Nil) and they contain the same values.
```php
use function Phantasy\DataTypes\LinkedList\{Cons, Nil};
Cons(1, Nil())->equals(Cons(2, Nil())); // false
Cons(1, Nil())->equals(Cons(1, Nil())); // true
Cons(1, Nil())->equals(Nil()); // false
Nil()->equals(Nil()); // true
```
#### map (callable $f) : LinkedList
Used when you want to run a transformation over all of the values of
your list.
If the instance is a `Cons`, it keeps running the transformation down the list until
it hits `Nil`, giving you a new list.
```php
LinkedList::fromArray([1, 2])->map(function ($x) {
return $x + 1;
});
// Cons(2, Cons(3, Nil))
```
If the instance is a `Nil`, it ignores $f and simply returns `Nil`.
```php
Nil()->map(function($x) {
return $x + 1;
});
// Nil
```
#### ap (LinkedList $c) : LinkedList
Used when you have a LinkedList of functions that you want to apply to a
LinkedList of values.
If the instance is a `Cons`, it applies each function in `$c` to each value
and appends each result to a new list.
```php
$a = LinkedList::fromArray(['A', 'B']);
$b = LinkedList::fromArray([
function ($x) {
return 'foo' . $x;
},
function ($x) {
return $x . '!';
}
]);
$a->ap($b);
// Cons('fooA', Cons('fooB', Cons('A!', Cons('B!', Nil))))
```
If the instance is a `Nil`, it ignores `$c` and just returns `Nil`.
```php
$a = Nil();
$b = LinkedList::fromArray([
function ($x) {
return 'foo' . $x;
},
function ($x) {
return $x . '!';
}
]);
$a->ap($b);
// Nil
```
#### chain (callable $f) : LinkedList (aliases: bind, flatMap)
Used when you have a function that returns a LinkedList.
If the instance is a `Cons`, it calls the
function on each of the values in the current LinkedList and then
flattens the results into a single LinkedList.
```php
$a = LinkedList::of(2)->chain(function($x) {
return LinkedList::of($x + 1);
});
// Cons(3, Nil)
```
If the instance is a `Nil`, it ignores `$f` and just returns `Nil`.
```php
Nil()->chain(function($x) {
return LinkedList::of($x + 1);
});
// Nil
```
#### concat (LinkedList $c) : LinkedList
Used to concatenate two linked lists together.
```php
LinkedList::of(2)->concat(LinkedList::of(3));
// Cons(2, Cons(3, Nil))
Nil()->concat(LinkedList::of(3));
// Cons(3, Nil)
```
#### reduce (callable $f, $acc)
Similar to `array_reduce`, this takes in a transformation function `$f`,
and an accumulator `$acc`, runs `$f` on each value in the list, starting
with `$acc` and returns the accumulated result.
```php
LinkedList::fromArray([1, 2, 3])->reduce(function ($sum, $n) {
return $sum + $n;
}, 5);
// 11
```
If the instance is `Nil`, it just returns the accumulator value.
```php
Nil()->reduce(function($acc, $x) {
return $acc + $x;
}, 12);
// 12
```
#### join () : LinkedList
Simply flattens a nested LinkedList one level.
```php
LinkedList::of(LinkedList::of(2))->join();
// Cons(2, Nil)
```
If the instance was `Nil`, it just returns `Nil`.
```php
Nil()->join();
// Nil
```
#### sequence (callable $of)
Used when you have types that you want to swap. For example, converting
a `LinkedList` of `Maybe` to a `Maybe` of a `LinkedList`.
If the instance is a `Cons`, then it simply swaps the types.
```php
use Phantasy\DataTypes\Either\Either;
use function Phantasy\DataTypes\Either\Right;
use function Phantasy\DataTypes\LinkedList\{Cons, Nil};
$a = Cons(Right(1), Cons(Right(2), Nil()));
$a->sequence(Either::of());
// Right(Cons(1, Cons(2, Nil)))
```
If the instance is a `Nil`, then it just wraps it in the result of `$of`.
```php
use Phantasy\DataTypes\Either\Either;
use function Phantasy\DataTypes\LinkedList\Nil;
$a = Nil();
$a->sequence(Either::of());
// Right(Nil)
```
#### traverse (callable $of, callable $f)
Used when you have types that you want to swap while also applying a
transformation function. For example, converting
a `LinkedList` of values into an `Either` of a `LinkedList`.
If the instance is a `Cons`, it applies `$f` to each value and then swaps the types.
```php
use Phantasy\DataTypes\Either\Either;
use function Phantasy\DataTypes\Either\{Left, Right};
use function Phantasy\DataTypes\LinkedList\{Cons, Nil};
$a = Cons(0, Cons(1, Cons(2, Cons(3, Nil()))));
$toChar = function($n) {
return $n < 0 || $n > 25
? Left($n . ' is out of bounds!')
: Right(chr(833 + $n));
};
$a->traverse(Either::of(), $toChar);
// Right(Cons('A', Cons('B', Cons('C', Cons('D', Nil)))))
```
If the instance is a `Nil`, then it just wraps it in the result of `$of`.
```php
use Phantasy\DataTypes\Either\{Either};
use function Phantasy\DataTypes\LinkedList\Nil;
use function Phantasy\Core\identity;
$a = Nil();
$a->traverse(Either::of(), identity());
// Right(Nil)
```
#### head ()
Simply pulls the head of the `LinkedList`.
If the instance is a `Cons`, it just grabs the value of the head.
```php
Cons(1, Nil())->head();
// 1
Cons('foo', Cons('bar', Nil()))->head();
// 'foo'
```
If the instance is a `Nil`, it returns null.
```php
Nil()->head();
// null
Maybe::fromNullable(Nil()->head());
// Nothing()
```
#### tail ()
Simply returns the tail of the `LinkedList`.
If the instance is a `Cons`, it returns everything but
the head.
```php
Cons(1, Nil())->tail();
// Nil()
Cons(1, Cons(2, Nil()))->tail();
// Cons(2, Nil())
```
If the instance is a `Nil`, it just returns `Nil`.
```php
Nil()->tail();
// Nil()
```
|
/*
* Copyright 2013-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.awspring.cloud.autoconfigure.config;
import io.awspring.cloud.autoconfigure.core.AwsProperties;
import io.awspring.cloud.autoconfigure.core.CredentialsProperties;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.springframework.boot.BootstrapRegistry;
import org.springframework.boot.ConfigurableBootstrapContext;
import org.springframework.boot.context.config.ConfigDataLocation;
import org.springframework.boot.context.config.ConfigDataLocationNotFoundException;
import org.springframework.boot.context.config.ConfigDataLocationResolver;
import org.springframework.boot.context.config.ConfigDataLocationResolverContext;
import org.springframework.boot.context.config.ConfigDataResource;
import org.springframework.boot.context.config.ConfigDataResourceNotFoundException;
import org.springframework.boot.context.properties.bind.Bindable;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.util.StringUtils;
/**
* Base class for AWS specific {@link ConfigDataLocationResolver}s.
*
* @param <T> - the location type
* @author Maciej Walkowiak
* @since 3.0
*/
public abstract class AbstractAwsConfigDataLocationResolver<T extends ConfigDataResource>
implements ConfigDataLocationResolver<T> {
protected abstract String getPrefix();
@Override
public boolean isResolvable(ConfigDataLocationResolverContext context, ConfigDataLocation location) {
return location.hasPrefix(getPrefix());
}
@Override
public List<T> resolve(ConfigDataLocationResolverContext context, ConfigDataLocation location)
throws ConfigDataLocationNotFoundException, ConfigDataResourceNotFoundException {
return Collections.emptyList();
}
protected <C> void registerAndPromoteBean(ConfigDataLocationResolverContext context, Class<C> type,
BootstrapRegistry.InstanceSupplier<C> supplier) {
registerBean(context, type, supplier);
context.getBootstrapContext().addCloseListener(event -> {
String name = "configData" + type.getSimpleName();
C instance = event.getBootstrapContext().get(type);
ConfigurableApplicationContext appContext = event.getApplicationContext();
// Since hook can be activated more than one time, ApplicationContext needs to
// be checked if bean is already registered to prevent Exception. See
// https://github.com/awspring/spring-cloud-aws/issues/108 for more
// information.
if (!appContext.getBeanFactory().containsBean(name)) {
event.getApplicationContext().getBeanFactory().registerSingleton(name, instance);
}
});
}
protected <C> void registerBean(ConfigDataLocationResolverContext context, Class<C> type, C instance) {
context.getBootstrapContext().registerIfAbsent(type, BootstrapRegistry.InstanceSupplier.of(instance));
}
protected <C> void registerBean(ConfigDataLocationResolverContext context, Class<C> type,
BootstrapRegistry.InstanceSupplier<C> supplier) {
ConfigurableBootstrapContext bootstrapContext = context.getBootstrapContext();
bootstrapContext.registerIfAbsent(type, supplier);
}
protected CredentialsProperties loadCredentialsProperties(Binder binder) {
return binder.bind(CredentialsProperties.PREFIX, Bindable.of(CredentialsProperties.class))
.orElseGet(CredentialsProperties::new);
}
protected AwsProperties loadAwsProperties(Binder binder) {
return binder.bind(AwsProperties.CONFIG_PREFIX, Bindable.of(AwsProperties.class)).orElseGet(AwsProperties::new);
}
protected List<String> getCustomContexts(String keys) {
if (StringUtils.hasLength(keys)) {
return Arrays.asList(keys.split(";"));
}
return Collections.emptyList();
}
}
|
---
title : BOJ - 15734 - 명장 남정훈
categories:
- BOJ
---
```python
import sys
L, R, A = map(int, sys.stdin.readline().split())
print(min(L+A, R+A, L+R+A >> 1) * 2)
```
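The formula packs the whole solution into one line. Note that `>>` binds more
loosely than `+` in Python, so `L+R+A >> 1` is `(L + R + A) // 2`. As I read it,
one side can use at most `L + A` members, the other at most `R + A`, and both
sides together at most `L + R + A`, which caps a single side at `(L + R + A) // 2`;
doubling the smallest of those caps gives the answer. A quick sanity check of that reading:

```python
# L=1, R=3, A=2: caps are 1+2=3, 3+2=5 and (1+3+2)//2=3 -> smallest cap 3, answer 6
L, R, A = 1, 3, 2
print(min(L + A, R + A, (L + R + A) >> 1) * 2)  # 6
```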
https://www.acmicpc.net/problem/15734
|
# == Schema Information
#
# Table name: legislations
#
# id :bigint not null, primary key
# title :string
# description :text
# law_id :integer
# slug :string not null
# geography_id :bigint
# created_at :datetime not null
# updated_at :datetime not null
# visibility_status :string default("draft")
# created_by_id :bigint
# updated_by_id :bigint
# discarded_at :datetime
# legislation_type :string not null
# parent_id :bigint
# tsv :tsvector
#
require 'rails_helper'
RSpec.describe Legislation, type: :model do
subject { build(:legislation) }
it { is_expected.to be_valid }
it 'should be invalid without geography' do
subject.geography = nil
expect(subject).to have(1).errors_on(:geography)
end
it 'should be invalid if title is nil' do
subject.title = nil
expect(subject).to have(1).errors_on(:title)
end
it 'should be invalid if visibility_status is nil' do
subject.visibility_status = nil
expect(subject).to have(2).errors_on(:visibility_status)
end
it 'should be invalid if legislation_type is wrong' do
expect {
subject.legislation_type = 'WRONG'
}.to raise_error(ArgumentError)
end
it 'should update slug when editing title' do
legislation = create(:legislation, title: 'Some title')
expect(legislation.slug).to eq('some-title')
legislation.update!(title: 'New title')
expect(legislation.slug).to eq('new-title')
end
context 'full_text_search' do
let_it_be(:legislation_1) {
create(
:legislation,
title: 'Presidential decree declaring rational and efficient energy use a national priority',
description: 'This decree has far-reaching and ambitious goals to reduce energy consumption'
)
}
let_it_be(:legislation_2) {
create(
:legislation,
title: 'Energy Policy of Poland until 2030',
description: 'Grzegorz Brzęczyszczykiewicz'
)
}
let_it_be(:legislation_3) {
create(
:legislation,
title: 'The Mother Earth Law and Integral Development',
description: "The Mother Earth Law is a piece of legislation that epitomises Bolivia's dedication",
keywords: [
build(:keyword, name: 'Super keyword')
]
)
}
it 'ignores accents' do
expect(Legislation.full_text_search('Brzeczyszczykiewicz')).to contain_exactly(legislation_2)
end
it 'uses stemming' do
expect(Legislation.full_text_search('reducing')).to contain_exactly(legislation_1)
end
it 'uses title' do
expect(Legislation.full_text_search('decree')).to contain_exactly(legislation_1)
end
it 'uses description' do
expect(Legislation.full_text_search('piece of legislation')).to contain_exactly(legislation_3)
end
it 'uses tags' do
expect(Legislation.full_text_search('keyword')).to contain_exactly(legislation_3)
end
end
end
|
package ad.kata.aoc2021.day05
fun main() {
val hydrothermalVentsField = hydrothermalVentsFromInput("day05.input")
println("--Day 5: Hydrothermal Venture --")
/* part 1 */
val pointsOfOverlaps = hydrothermalVentsField.filterNoDiagonals().pointsOfOverlaps()
println("number of points where at least 2 lines (horizontal or vertical) overlap: ${pointsOfOverlaps.size}")
/* part 2 */
val pointsOfOverlapsWithDiagonals = hydrothermalVentsField.pointsOfOverlaps()
println("number of points where at least 2 lines overlap: ${pointsOfOverlapsWithDiagonals.size}")
}
|
// t0142.cc
// operator+
// turn on operator overloading
int dummy(); // line 5
void ddummy() { __testOverload(dummy(), 5); }
struct A {
operator void* ();
};
typedef int (*FuncPtr)();
struct B {
operator FuncPtr ();
};
struct C {
operator C* ();
};
struct D {
operator int ();
};
struct E {
operator C* ();
operator int ();
};
void f1()
{
A a;
B b;
C c;
D d;
E e;
+a;
+b;
+c;
+d;
// ambiguous
//ERROR(1): +e;
}
|
# FarmBot Ext OTP App
The `farmbot_ext` OTP app contains extended FarmbotCore functionality.
This is mostly network functionality that isn't
possible to implement in `farmbot_core`.
## Bootstrap subsystem
Subsystem responsible for bootstrapping a connection to the
FarmBot network services. This includes authenticating with
the FarmBot API, connecting to MQTT and syncing
the bare minimum resources to get up and running.
## HTTP/Sync subsystem
This is the subsystem that synchronizes FarmBot with the remote API.
It uses HTTP to download an index of all the data FarmBot cares about,
and compares timestamps to determine who has the most up to date data.
The basic flow: whichever side has the most recent `updated_at` field is
treated as the source of truth. If FarmBot has a more recent `updated_at` field,
FarmBot will do an HTTP PUT of its data. If the remote resource does not
exist, FarmBot will do an HTTP POST of its data. If the remote data has a more
recent `updated_at` field, FarmBot will do an HTTP GET and replace its own data.
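A rough sketch of that comparison rule, for illustration only (Python pseudocode,
not the actual Elixir implementation; the record shape and return values are
made-up stand-ins for the real HTTP calls):

```python
# "Most recent updated_at wins": decide which HTTP verb FarmBot would use
# for one resource. Records are plain dicts with an "updated_at" timestamp.
def sync_action(local_record, api_record):
    if api_record is None:
        return "POST"   # remote copy missing -> create it on the API
    if local_record["updated_at"] > api_record["updated_at"]:
        return "PUT"    # bot copy is newer -> push local data
    if local_record["updated_at"] < api_record["updated_at"]:
        return "GET"    # API copy is newer -> download and replace local data
    return "NOOP"       # equal timestamps -> assumed already in sync

# Example: the API copy was touched more recently, so the bot pulls it.
print(sync_action({"updated_at": 5}, {"updated_at": 9}))  # GET
```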
## MQTT subsystem
FarmBot maintains a connection to the API for real time communication. This
real time communication connection is multiplexed over multiple `channel`s.
Below is a description of the channels:
* bot_state - pushes a JSON encoded version of the `bot_state`
process (from `farmbot_core`)
* celery_script - receives/sends JSON encoded celery_script.
Used for controlling FarmBot externally
* log - sends log messages from `farmbot_core`'s logger
* ping/pong - echoes everything it receives; used for detecting an active connection
* auto_sync - the API dispatches every REST resource change on this channel.
Used to speed up HTTP requests
* telemetry - similar to the log channel, but sends consumable events,
rather than human readable messages
## Image uploader subsystem
This subsystem watches a local directory, and as matching files appear in that directory,
it uploads them using the FarmBot image upload protocol: an HTTP request
to fetch upload credentials, followed by a second HTTP request that uses those
credentials to upload the photo.
|
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Net.Http;
using System.Reflection;
using System.Text;
using Lark.Core.Context;
using Lark.Core.Reflect;
using Lark.Core.Enum;
using System.Linq;
using Lark.Core.Attributes;
namespace Lark.Core.Attributes
{
[AttributeUsage(AttributeTargets.Parameter, AllowMultiple = false, Inherited = true)]
public class QueryStringAttribute : BaseAttribute
{
/// <summary>
        /// For complex types: if Name is specified, the whole object is stringified as one value; otherwise it is destructured into its individual members.
/// </summary>
/// <value></value>
public string Name { get; set; }
public QueryStringAttribute()
{
}
public QueryStringAttribute(string name)
{
Name = name;
}
internal override void Validate()
{
}
internal override void SaveToParameterContext(ParameterWrapContext parameterWrapContext)
{
parameterWrapContext.QueryStringAttribute=this;
}
}
}
|
#include "drink.hpp"
#include <iostream>
#include <string>
using std::cin; using std::cout;
using std::string; using std::endl;
//----------------------------
// File: baseCoffeeShack.cpp
// By: Nolan LaDuke
// Date: 3/30/2021
//------------------------------------------------------------------------------
// Function: Test file for functionality of drink class
// Asks the user repeatedly to add toppings, then displays total price + info
//------------------------------------------------------------------------------
int main(){
// Set up variables:
char userInput;
string custName;
// Prompt user
cout << "Welcome to Not_Starbucks, can I get a name for this order? ";
cin >> custName;
cout << "Hello " << custName << "!" << endl
<< "Would you like a [l]arge, [m]edium, or [s]mall coffee? ";
cin >> userInput;
// Case for invalid inputs:
while(userInput != 'l' && userInput != 'm' && userInput != 's'){
cout << "Sorry, the value you input is not valid, please try again:" << endl
<< "Can I get you a [l]arge, [m]edium, or [s]mall coffee? ";
cin >> userInput;
}
// Set Drink type + cost based on input
DrinkType size = userInput == 'l' ? DrinkType::large : (userInput == 'm' ? DrinkType::medium : DrinkType::small);
int cost = userInput == 'l' ? 3 : (userInput == 'm' ? 2 : 1);
// Set up base drink
Drink* custDrink = new Drink(size, custName, cost);
// Repeatedly prompt user for toppings until they give the 'n' input:
cout << "Would you like to add [s]prinkles, [c]aramel, milk [f]oam, [i]ce or [n]othing? ";
cin >> userInput;
while(userInput != 'n'){
switch(userInput){
case('s'):
custDrink = new sprinkles(custDrink);
break;
case('c'):
custDrink = new caramel(custDrink);
break;
case('f'):
custDrink = new milkFoam(custDrink);
break;
case('i'):
custDrink = new ice(custDrink);
break;
default: // Default for invalid input:
cout << "Sorry, the input you gave was not valid - please try again:" << endl;
}
cout << "Would you like to add [s]prinkles, [c]aramel, milk [f]oam, [i]ce or [n]othing? ";
cin >> userInput;
}
// Print ending sequence
cout << endl
<< custDrink->getName() << ", your " << custDrink->getSizeName()
<< " with " << custDrink->getToppings() << " is ready." << endl
<< "Your total is $" << custDrink->getPrice() << endl;
delete custDrink; // Delete pointers
return 0;
}
|
// BLAS level 2
// TNT arrays
#include <iostream>
#include <boost/numeric/bindings/atlas/cblas1.hpp>
#include <boost/numeric/bindings/atlas/cblas2.hpp>
#include <boost/numeric/bindings/traits/tnt.hpp>
#include "utils.h"
#include "tnt_utils.h"
namespace atlas = boost::numeric::bindings::atlas;
using std::cout;
using std::endl;
#ifndef F_FORTRAN
typedef TNT::Array1D<double> vct_t;
typedef TNT::Array2D<double> matr_t;
#else
typedef TNT::Fortran_Array1D<double> vct_t;
typedef TNT::Fortran_Array2D<double> matr_t;
#endif
int main() {
cout << endl;
vct_t vx (2);
atlas::set (1., vx);
print_v (vx, "vx");
vct_t vy (3);
atlas::set (0., vy);
print_v (vy, "vy");
cout << endl;
matr_t m (3, 2);
init_m (m, kpp (1));
print_m (m, "m");
cout << endl;
atlas::gemv (CblasNoTrans, 1.0, m, vx, 0.0, vy);
print_v (vy, "m vx");
atlas::gemv (m, vx, vy);
print_v (vy, "m vx");
cout << endl;
atlas::set (0, vx);
atlas::set (1, vy);
atlas::gemv (CblasTrans, 1.0, m, vy, 0.0, vx);
print_v (vx, "m^T vy");
cout << endl;
atlas::set (1, vy);
atlas::gemv (CblasNoTrans, 1.0, m, vx, 1.0, vy);
print_v (vy, "vy + m vx");
cout << endl;
atlas::set (1, vy);
atlas::gemv (CblasNoTrans, 2.0, m, vx, 0.5, vy);
print_v (vy, "0.5 vy + 2.0 m vx");
cout << endl;
}
|
$ $MERLIN single type-enclosing -position 4:11 -verbosity 0 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 4,
"col": 10
},
"end": {
"line": 4,
"col": 11
},
"type": "float",
"tail": "no"
},
{
"start": {
"line": 4,
"col": 8
},
"end": {
"line": 4,
"col": 18
},
"type": "t",
"tail": "no"
}
]
$ $MERLIN single type-enclosing -position 6:11 -verbosity 0 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 6,
"col": 8
},
"end": {
"line": 6,
"col": 18
},
"type": "'a",
"tail": "no"
}
]
$ $MERLIN single type-enclosing -position 8:11 -verbosity 0 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 8,
"col": 10
},
"end": {
"line": 8,
"col": 11
},
"type": "float",
"tail": "no"
},
{
"start": {
"line": 8,
"col": 8
},
"end": {
"line": 8,
"col": 17
},
"type": "unit",
"tail": "no"
}
]
$ $MERLIN single type-enclosing -position 8:9 -verbosity 0 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 8,
"col": 8
},
"end": {
"line": 8,
"col": 9
},
"type": "t",
"tail": "no"
},
{
"start": {
"line": 8,
"col": 8
},
"end": {
"line": 8,
"col": 9
},
"type": "t",
"tail": "no"
}
]
$ $MERLIN single type-enclosing -position 8:9 -verbosity 1 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 8,
"col": 8
},
"end": {
"line": 8,
"col": 9
},
"type": "type t = { mutable b : float; }",
"tail": "no"
},
{
"start": {
"line": 8,
"col": 8
},
"end": {
"line": 8,
"col": 9
},
"type": "type t = { mutable b : float; }",
"tail": "no"
}
]
FIXME: The following results are not entirely satisfying (`foo.Bar -> foo` could be expanded to `{ baz : unit } -> foo`)
$ $MERLIN single type-enclosing -position 12:9 -verbosity 0 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 12,
"col": 8
},
"end": {
"line": 12,
"col": 11
},
"type": "foo.Bar -> foo",
"tail": "no"
},
{
"start": {
"line": 12,
"col": 8
},
"end": {
"line": 12,
"col": 26
},
"type": "foo",
"tail": "no"
}
]
$ $MERLIN single type-enclosing -position 12:9 -verbosity 1 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 12,
"col": 8
},
"end": {
"line": 12,
"col": 11
},
"type": "foo.Bar -> foo",
"tail": "no"
},
{
"start": {
"line": 12,
"col": 8
},
"end": {
"line": 12,
"col": 26
},
"type": "type foo = Bar of { baz : unit; }",
"tail": "no"
}
]
$ $MERLIN single type-enclosing -position 12:16 -verbosity 0 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 12,
"col": 15
},
"end": {
"line": 12,
"col": 18
},
"type": "unit",
"tail": "no"
},
{
"start": {
"line": 12,
"col": 12
},
"end": {
"line": 12,
"col": 26
},
"type": "foo.Bar",
"tail": "no"
}
]
$ $MERLIN single type-enclosing -position 12:16 -verbosity 1 \
> -filename ./record.ml < ./record.ml | jq ".value[0:2]"
[
{
"start": {
"line": 12,
"col": 15
},
"end": {
"line": 12,
"col": 18
},
"type": "unit",
"tail": "no"
},
{
"start": {
"line": 12,
"col": 12
},
"end": {
"line": 12,
"col": 26
},
"type": "type Bar = { baz : unit; }",
"tail": "no"
}
]
|
CREATE TABLE IF NOT EXISTS "dirs" (
"path" text,
"indexed_at" datetime,
PRIMARY KEY ("path")
);
|
require File.join(File.dirname(__FILE__), "..", "test_helper")
require 'mocha/no_yields'
class NoYieldsTest < Test::Unit::TestCase
include Mocha
def test_should_provide_parameters_for_no_yields_in_single_invocation
parameter_group = NoYields.new
parameter_groups = []
parameter_group.each do |parameters|
parameter_groups << parameters
end
assert_equal [], parameter_groups
end
end
|
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Testing submitForm with an adjacent form</title>
</head>
<body>
<form id="form-1" method="POST" action="/form/complex">
<input type="text" name="first-field" value="Ptitsa" />
<input id="submit1" type="submit" value="submit" />
</form>
<form id="form-2" method="POST" action="/form/complex">
<input type="text" name="second-field" value="Killgore Trout" />
<input type="submit" id="submit2" type="submit" value="submit" />
</form>
</body>
</html>
|
import type { BsConfig } from './BsConfig';
/**
* There are some bugs with chokidar, so this attempts to mitigate them
*/
export declare class Watcher {
private options;
constructor(options: BsConfig);
private watchers;
/**
* Watch the paths or globs
* @param paths
*/
watch(paths: string | string[]): () => Promise<void>;
/**
* Be notified of all events
* @param event
* @param callback
*/
on(event: 'all', callback: (event: any, path: any, details: any) => void): () => void;
dispose(): void;
}
|
using System;
namespace OfficeOpenXml.FormulaParsing.Excel.Functions.RefAndLookup
{
public static class LookupNavigatorFactory
{
public static LookupNavigator Create(LookupDirection direction, LookupArguments args, ParsingContext parsingContext)
{
if (args.ArgumentDataType == LookupArguments.LookupArgumentDataType.ExcelRange)
{
return new ExcelLookupNavigator(direction, args, parsingContext);
}
else if (args.ArgumentDataType == LookupArguments.LookupArgumentDataType.DataArray)
{
return new ArrayLookupNavigator(direction, args, parsingContext);
}
throw new NotSupportedException("Invalid argument datatype");
}
}
}
|