repo_name | path | text |
---|---|---|
tumsgis/veganmentor | src/main/scala/MongoDb.scala |
import DataStructure.{Mentee, Mentor, Participant}
import MongoDb.MongoFactory
import com.mongodb.casbah.Imports.{ObjectId, _}
import com.mongodb.casbah.{Imports, MongoCollection, MongoConnection}
object MongoDb {
sealed trait MongoDbParticipant {
val id: ObjectId
val participant: Participant
}
case class MongoDbMentor(id: ObjectId,
participant: Participant,
emptySlots: Int) extends MongoDbParticipant
case class MongoDbMentee(id: ObjectId,
participant: Participant,
menteeId: Option[ObjectId]) extends MongoDbParticipant
case class MongoFactory(collectionName: String) {
private val server = "localhost"
private val port = 27017
private val db = "veganMentor"
val connection: MongoConnection = MongoConnection(server, port)
val collection: MongoCollection = connection(db)(collectionName)
}
}
object MongoDbRepo {
def fromUnpairedParticipant(participant: Participant,
relationId: Option[ObjectId] = None): MongoDBObject = {
val builder = MongoDBObject.newBuilder
participant match {
case Mentor(id, _, _, _, _, _, _, _, _) =>
builder += "_id" -> id
case _ => /* Nothing */
}
builder += "timestamp" -> Util.toJavaDate(participant.timestamp)
builder += "email" -> participant.email
builder += "name" -> participant.name
builder += "note" -> participant.note
builder += "approvedTermsAndConditions" -> participant.approvedTermsAndConditions
(participant, relationId) match {
// Add mentor id to mentees (kind of a foreign key)
case (Mentee(_, _, _, _, _, _, _, _), Some(id)) => builder += "mentorId" -> id
case (Mentor(_, _, _, _, _, _, approvedSlots, _, mentees), _) =>
builder += "approvedSlots" -> approvedSlots
builder += "emptySlots" -> (approvedSlots - mentees.size)
case _ => /* Nothing */
}
builder.result
}
def saveParticipant(participant: Participant): Unit = {
val mongoObj = fromUnpairedParticipant(participant)
participant match {
case Mentor(_, _, _, _, _, _, _, _, mentees) => saveParticipant(mongoObj, "mentor", participant.email)
// Save all connected mentees
mentees.foreach(m => {
val menteeObj = fromUnpairedParticipant(m, mongoObj._id)
saveParticipant(menteeObj, "mentee", m.email)
})
case Mentee(_, _, _, _, _, _, _, _) => saveParticipant(mongoObj, "mentee", participant.email)
}
}
private def saveParticipant(mongoObj: MongoDBObject, collection: String, email: String): Unit =
if (!emailAlreadyRegistered(email))
MongoFactory(collection).collection.save(mongoObj, WriteConcern.Safe)
private def emailAlreadyRegistered(email: String): Boolean = {
if (getMentorByEmail(email).nonEmpty) {
println(s"Mentor with email $email already registered")
return true
} else if (getMenteeByEmail(email).nonEmpty) {
println(s"Mentee with email $email already registered")
return true
}
false
}
def updateParticipant(participant: Participant): Unit = {
val mongoObj = fromUnpairedParticipant(participant)
participant match {
case Mentor(id, _, _, _, _, _, _, emptySlots, mentees) => updateParticipant("mentor", id, ("emptySlots", emptySlots - mentees.size))
// Update all connected mentees
mentees.foreach(m =>
updateParticipant("mentee", m.id, ("mentorId", mongoObj._id.get)))
case _ => /* Nothing */
}
}
private def updateParticipant(collection: String, id: Option[Imports.ObjectId], field: (String, Any)): Unit =
MongoFactory(collection).collection.update(MongoDBObject("_id" -> id), $set(field))
def toMentor(dbObject: DBObject): Mentor =
Mentor(
Some(dbObject.as[ObjectId]("_id")),
Util.fromJavaDate(dbObject.as[java.util.Date]("timestamp")),
dbObject.as[String]("email"),
dbObject.as[String]("name"),
dbObject.as[String]("note"),
dbObject.as[Boolean]("approvedTermsAndConditions"),
dbObject.as[Int]("approvedSlots"),
dbObject.as[Int]("emptySlots")
)
def toMentee(dbObject: DBObject): Mentee =
Mentee(
Some(dbObject.as[ObjectId]("_id")),
Util.fromJavaDate(dbObject.as[java.util.Date]("timestamp")),
dbObject.as[String]("email"),
dbObject.as[String]("name"),
dbObject.as[String]("note"),
dbObject.as[Boolean]("approvedTermsAndConditions"),
if (dbObject.containsField("mentorId")) Some(dbObject.as[ObjectId]("mentorId")) else None,
None
)
def getAllMentors: List[Mentor] =
MongoFactory("mentor").collection.find.map(toMentor).toList
def getAllMentees: List[Mentee] =
MongoFactory("mentee").collection.find.map(toMentee).toList
def get(collection: String, query: DBObject): Iterator[Imports.DBObject] =
for (x <- MongoFactory(collection).collection.find(query)) yield x
def getOne(collection: String, query: DBObject): Option[Imports.DBObject] =
for (x <- MongoFactory(collection).collection.findOne(query)) yield x
def getMentorsWithEmptySlots: List[Mentor] =
get("mentor", $and("approvedTermsAndConditions" $eq true, "emptySlots" $gt 0)).map(toMentor).toList
def getMentorByEmail(email: String): Option[Mentor] =
getOne("mentor", "email" $eq email).map(toMentor)
def getMenteesSeekingMentor: List[Mentee] =
get("mentee", $and("approvedTermsAndConditions" $eq true, "mentorId" $exists false)).map(toMentee).toList
def getMenteeByEmail(email: String): Option[Mentee] =
getOne("mentee", "email" $eq email).map(toMentee)
def getMenteesByMentorId(mentorId: ObjectId): List[Mentee] =
get("mentee", "mentorId" $eq mentorId).map(toMentee).toList
def dropAllParticipants(): Unit = {
MongoFactory("mentor").collection.drop
MongoFactory("mentee").collection.drop
}
} |
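A minimal usage sketch for MongoDbRepo above (not part of the original file): it only calls methods whose signatures are visible in the repository, and assumes a MongoDB instance is running on localhost with the veganMentor database populated.

object MongoDbRepoExample extends App {
  // Pair every mentee still waiting for a mentor with a mentor that has a free slot.
  val openMentors = MongoDbRepo.getMentorsWithEmptySlots
  val waiting = MongoDbRepo.getMenteesSeekingMentor
  openMentors.zip(waiting).foreach { case (mentor, mentee) =>
    println(s"Could pair ${mentee.email} with ${mentor.email} (${mentor.emptySlots} slots left)")
  }
}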
tumsgis/veganmentor | src/test/scala/TestUtil.scala | object TestUtil {
def noEmptySlotsBelowZero: Boolean =
!MongoDbRepo.getAllMentors.exists(_.emptySlots < 0)
}
|
tumsgis/veganmentor | src/main/scala/JsonMapping.scala |
import DataStructure.{Mentee, Mentor}
import net.liftweb.json.JsonAST._
import net.liftweb.json.JsonDSL._
object JsonMapping {
implicit val formats = net.liftweb.json.DefaultFormats
def printMentor(mentor: Mentor): Unit = {
val json = "mentor" ->
("timestamp" -> mentor.timestamp.toString) ~
("email" -> mentor.email) ~
("name" -> mentor.name) ~
("note" -> mentor.note) ~
("approvedTermsAndConditions" -> mentor.approvedTermsAndConditions) ~
("approvedSlots" -> mentor.approvedSlots) ~
("emptySlots" -> mentor.emptySlots) ~
("mentees" ->
mentor.mentees.map { m =>
("timestamp" -> m.timestamp.toString) ~
("email" -> m.email) ~
("name" -> m.name) ~
("note" -> m.note) ~
("approvedTermsAndConditions" -> m.approvedTermsAndConditions)
})
println(prettyRender(json))
}
def printMentee(mentee: Mentee): Unit = {
val json = "mentor" ->
("timestamp" -> mentee.timestamp.toString) ~
("email" -> mentee.email) ~
("name" -> mentee.name) ~
("note" -> mentee.note) ~
("approvedTermsAndConditions" -> mentee.approvedTermsAndConditions) ~
("mentor" ->
mentee.mentor.map { m =>
("timestamp" -> m.timestamp.toString) ~
("email" -> m.email) ~
("name" -> m.name) ~
("note" -> m.note) ~
("approvedTermsAndConditions" -> m.approvedTermsAndConditions) ~
("nrOfAvailableParticipants" -> m.approvedSlots)
})
println(prettyRender(json))
}
}
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/TweetEntities.scala | package com.danielasfregola.twitter4s.entities.v2
final case class TweetEntities(annotations: Seq[TweetEntitiesAnnotation],
urls: Seq[TweetEntitiesURL],
hashtags: Seq[TweetEntitiesHashtag],
mentions: Seq[TweetEntitiesMention],
cashtags: Seq[TweetEntitiesCashtag])
final case class TweetEntitiesAnnotation(start: Int,
end: Int,
probability: Float,
`type`: String,
normalized_text: String)
final case class TweetEntitiesURL(start: Int,
end: Int,
url: String,
expanded_url: String,
display_url: String,
unwound_url: Option[String])
final case class TweetEntitiesHashtag(start: Int,
end: Int,
tag: String)
final case class TweetEntitiesMention(start: Int,
end: Int,
username: Option[String],
id: Option[String])
final case class TweetEntitiesCashtag(start: Int,
end: Int,
tag: String)
|
harrywada/twitter4s | src/test/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/tweets/TwitterTweetLookupClientSpec.scala |
package com.danielasfregola.twitter4s.http.clients.rest.v2.tweets
import akka.http.scaladsl.model.HttpMethods
import com.danielasfregola.twitter4s.entities.RatedData
import com.danielasfregola.twitter4s.entities.v2.responses.{TweetResponse, TweetsResponse}
import com.danielasfregola.twitter4s.helpers.ClientSpec
import com.danielasfregola.twitter4s.http.clients.rest.v2.tweets.fixtures.tweet_lookup.{TweetResponseFixture, TweetsResponseFixture}
import com.danielasfregola.twitter4s.http.clients.rest.v2.utils.V2SpecQueryHelper
class TwitterTweetLookupClientSpec extends ClientSpec {
class TwitterTweetLookupClientSpecContext extends RestClientSpecContext with TwitterTweetLookupClient
"Twitter Tweet Lookup Client" should {
"lookup tweets" in new TwitterTweetLookupClientSpecContext {
val tweetIds = Seq("123", "456")
val result: RatedData[TweetsResponse] = when(lookupTweets(tweetIds))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/tweets"
request.uri.rawQueryString === Some(V2SpecQueryHelper.buildIdsParam(tweetIds))
}
.respondWithRated("/twitter/rest/v2/tweets/tweetlookup/tweets.json")
.await
result.rate_limit === rateLimit
result.data === TweetsResponseFixture.fixture
}
"lookup tweets with expansions" in new TwitterTweetLookupClientSpecContext {
val tweetIds = Seq("123", "456")
val expansions = V2SpecQueryHelper.allTweetExpansions
when(lookupTweets(
ids = tweetIds,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/tweets"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetExpansions(expansions),
V2SpecQueryHelper.buildIdsParam(tweetIds)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup tweets with tweet fields" in new TwitterTweetLookupClientSpecContext {
val tweetIds = Seq("123", "456")
val tweetFields = V2SpecQueryHelper.allTweetFields
when(lookupTweets(
ids = tweetIds,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/tweets"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildIdsParam(tweetIds),
V2SpecQueryHelper.buildTweetFieldsParam(tweetFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup tweets with user fields" in new TwitterTweetLookupClientSpecContext {
val tweetIds = Seq("123", "456")
val userFields = V2SpecQueryHelper.allUserFields
when(lookupTweets(
ids = tweetIds,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/tweets"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildIdsParam(tweetIds),
V2SpecQueryHelper.buildUserFieldsParam(userFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup tweet" in new TwitterTweetLookupClientSpecContext {
val result: RatedData[TweetResponse] = when(lookupTweet("123"))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/tweets/123"
request.uri.rawQueryString === None
}
.respondWithRated("/twitter/rest/v2/tweets/tweetlookup/tweet.json")
.await
result.rate_limit === rateLimit
result.data === TweetResponseFixture.fixture
}
"lookup tweet with expansions" in new TwitterTweetLookupClientSpecContext {
val tweetId = "123"
val expansions = V2SpecQueryHelper.allTweetExpansions
when(lookupTweet(
id = tweetId,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/tweets/$tweetId"
request.uri.rawQueryString === Some(V2SpecQueryHelper.buildTweetExpansions(expansions))
}
.respondWithOk
.await
}
"lookup tweet with tweet fields" in new TwitterTweetLookupClientSpecContext {
val tweetId = "123"
val tweetFields = V2SpecQueryHelper.allTweetFields
when(lookupTweet(
id = tweetId,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/tweets/$tweetId"
request.uri.rawQueryString === Some(V2SpecQueryHelper.buildTweetFieldsParam(tweetFields))
}
.respondWithOk
.await
}
"lookup tweet with user fields" in new TwitterTweetLookupClientSpecContext {
val tweetId = "123"
val userFields = V2SpecQueryHelper.allUserFields
when(lookupTweet(
id = tweetId,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/tweets/$tweetId"
request.uri.rawQueryString === Some(V2SpecQueryHelper.buildUserFieldsParam(userFields))
}
.respondWithOk
.await
}
}
}
|
harrywada/twitter4s | src/test/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/users/fixtures/user_lookup/UsersResponseFixture.scala |
package com.danielasfregola.twitter4s.http.clients.rest.v2.users.fixtures.user_lookup
import com.danielasfregola.twitter4s.entities.v2._
import com.danielasfregola.twitter4s.entities.v2.responses.UsersResponse
import java.time.Instant
object UsersResponseFixture {
val fixture: UsersResponse = UsersResponse(
data = Seq(User(
id = "6338724728067829004",
name = "<NAME>",
username = "shippy",
created_at = Some(Instant.parse("2020-05-15T16:03:42.000Z")),
`protected` = Some(false),
location = Some("Seattle"),
url = Some("https://www.google.com/sodales"),
description = Some("Sed efficitur ultrices elit sed volutpat."),
verified = Some(true),
entities = Some(UserEntities(
url = Some(UserURLContainer(
urls = Seq(
UserEntitiesURL(
start = 257,
end = 280,
url = "https://t.co/sodales",
expanded_url = "https://www.google.com/sodales",
display_url = "example.google.com/sodales",
)
)
)),
description = Some(UserEntitiesDescription(
urls = Seq(
UserEntitiesURL(
start = 257,
end = 280,
url = "https://t.co/sodales",
expanded_url = "https://www.google.com/sodales",
display_url = "example.google.com/sodales",
)
),
mentions = Seq(
UserEntitiesMention(
start = 105,
end = 121,
username = "SuspendisseAtNunc"
),
UserEntitiesMention(
start = 125,
end = 138,
username = "SuspendisseAtNuncPosuere"
)
),
hashtags = Seq(
UserEntitiesHashtag(
start = 47,
end = 60,
tag = "SuspendisseAtNunc"
),
UserEntitiesHashtag(
start = 171,
end = 194,
tag = "SuspendisseNunc"
)
),
cashtags = Seq(UserEntitiesCashtag(
start = 41,
end = 44,
tag = "GE"
))
))
)),
profile_image_url = Some("https://www.google.com/sodales.adri"),
public_metrics = Some(UserPublicMetrics(
followers_count = 501796,
following_count = 306,
tweet_count = 6655,
listed_count = 1433
)),
pinned_tweet_id = Some("2894469526322928935"),
withheld = None
)),
includes = Some(UserIncludes(
tweets = Seq(Tweet(
id = "6304480225832455363",
text = "Donec feugiat elit tellus, a ultrices elit sodales facilisis.",
attachments = None,
author_id = None,
context_annotations = None,
conversation_id = None,
created_at = None,
entities = None,
geo = None,
in_reply_to_user_id = None,
lang = None,
non_public_metrics = None,
organic_metrics = None,
possibly_sensitive = None,
promoted_metrics = None,
public_metrics = None,
referenced_tweets = None,
reply_settings = None,
source = None,
withheld = None
)),
users = Seq(
User(
id = "3955854555026519618",
name = "AliquamOrciEros",
username = "aliquamorcieros",
created_at = None,
`protected` = None,
withheld = None,
location = None,
url = None,
description = None,
verified = None,
entities = None,
profile_image_url = None,
public_metrics = None,
pinned_tweet_id = None
),
User(
id = "6747736441958634428",
name = "<NAME>",
username = "suspendisseatnunc",
created_at = None,
`protected` = None,
withheld = None,
location = None,
url = None,
description = None,
verified = None,
entities = None,
profile_image_url = None,
public_metrics = None,
pinned_tweet_id = None
)
)
)),
errors = Seq.empty[Error]
)
}
|
harrywada/twitter4s | src/test/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/utils/V2SpecQueryHelper.scala | package com.danielasfregola.twitter4s.http.clients.rest.v2.utils
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.TweetExpansions
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.TweetExpansions.{Expansions => TweetExpansions}
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.UserExpansions
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.UserExpansions.{Expansions => UserExpansions}
import com.danielasfregola.twitter4s.entities.v2.enums.fields.TweetFields.TweetFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.UserFields.UserFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.{TweetFields, UserFields}
import java.net.URLEncoder
private[v2] object V2SpecQueryHelper {
val allTweetExpansions: Seq[TweetExpansions] = Seq(
TweetExpansions.AuthorId,
TweetExpansions.`Entities.Mentions.Username`,
TweetExpansions.InReplyToUser,
TweetExpansions.`ReferencedTweets.Id`,
TweetExpansions.`ReferencedTweets.Id.AuthorId`
)
val allUserExpansions: Seq[UserExpansions] = Seq(
UserExpansions.PinnedTweetId
)
val allTweetFields: Seq[TweetFields] = Seq(
TweetFields.Attachments,
TweetFields.AuthorId,
TweetFields.ContextAnnotations,
TweetFields.ConversationId,
TweetFields.CreatedAt,
TweetFields.Entities,
TweetFields.Geo,
TweetFields.Id,
TweetFields.InReplyToUserId,
TweetFields.Lang,
TweetFields.NonPublicMetrics,
TweetFields.PublicMetrics,
TweetFields.OrganicMetrics,
TweetFields.PromotedMetrics,
TweetFields.PossiblySensitive,
TweetFields.ReferencedTweets,
TweetFields.ReplySettings,
TweetFields.Source,
TweetFields.Text,
TweetFields.Withheld
)
val allUserFields: Seq[UserFields] = Seq(
UserFields.CreatedAt,
UserFields.Description,
UserFields.Entities,
UserFields.Id,
UserFields.Location,
UserFields.Name,
UserFields.PinnedTweetId,
UserFields.ProfileImageUrl,
UserFields.Protected,
UserFields.PublicMetrics,
UserFields.Url,
UserFields.Username,
UserFields.Verified,
UserFields.Withheld
)
def buildIdsParam(ids: Seq[String]): String = "ids=" + encodeQueryParamValue(ids.mkString(","))
def buildUsernamesParam(usernames: Seq[String]): String = "usernames=" + encodeQueryParamKey(usernames.mkString(","))
def buildTweetExpansions(expansions: Seq[TweetExpansions]): String = "expansions=" + encodeQueryParamValue(expansions.mkString(","))
def buildUserExpansions(expansions: Seq[UserExpansions]): String = "expansions=" + encodeQueryParamValue(expansions.mkString(","))
def buildTweetFieldsParam(fields: Seq[TweetFields]): String = encodeQueryParamKey("tweet.fields") + "=" + encodeQueryParamValue(fields.mkString(","))
def buildUserFieldsParam(fields: Seq[UserFields]): String = encodeQueryParamKey("user.fields") + "=" + encodeQueryParamValue(fields.mkString(","))
private def encodeQueryParamKey(str: String) = URLEncoder.encode(str, "UTF-8").replace(".","%2E")
private def encodeQueryParamValue(str: String) = URLEncoder.encode(str, "UTF-8")
}
|
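A small illustration of the encoding these helpers produce (not part of the original file). The expected outputs follow from java.net.URLEncoder plus the explicit dot replacement in encodeQueryParamKey, and assume the TweetFields enumeration renders its values as snake_case wire names, as the TweetExpansions enumeration later in this dump does.

package com.danielasfregola.twitter4s.http.clients.rest.v2.utils
import com.danielasfregola.twitter4s.entities.v2.enums.fields.TweetFields

object V2SpecQueryHelperDemo extends App {
  println(V2SpecQueryHelper.buildIdsParam(Seq("123", "456")))
  // ids=123%2C456 -- the comma between ids is percent-encoded
  println(V2SpecQueryHelper.buildTweetFieldsParam(Seq(TweetFields.AuthorId, TweetFields.Lang)))
  // tweet%2Efields=author_id%2Clang -- the dot in the key is encoded as well
}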
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/Error.scala |
package com.danielasfregola.twitter4s.entities.v2
final case class Error(detail: String,
field: Option[String],
parameter: String,
resource_id: String,
resource_type: String,
section: Option[String],
title: String,
`type`: Option[String],
value: Option[String])
|
harrywada/twitter4s | src/test/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/users/TwitterUserLookupClientSpec.scala | package com.danielasfregola.twitter4s.http.clients.rest.v2.users
import akka.http.scaladsl.model.HttpMethods
import com.danielasfregola.twitter4s.entities.RatedData
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.UserExpansions.{Expansions => UserExpansions}
import com.danielasfregola.twitter4s.entities.v2.enums.fields.TweetFields.TweetFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.UserFields.UserFields
import com.danielasfregola.twitter4s.entities.v2.responses.{UserResponse, UsersResponse}
import com.danielasfregola.twitter4s.helpers.ClientSpec
import com.danielasfregola.twitter4s.http.clients.rest.v2.users.fixtures.user_lookup.{UserResponseFixture, UsersResponseFixture}
import com.danielasfregola.twitter4s.http.clients.rest.v2.utils.V2SpecQueryHelper
class TwitterUserLookupClientSpec extends ClientSpec {
class TwitterUserLookupClientSpecContext extends RestClientSpecContext with TwitterUserLookupClient
"Twitter User Lookup Client" should {
"lookup users" in new TwitterUserLookupClientSpecContext {
val userIds = Seq("123","456")
val result: RatedData[UsersResponse] = when(lookupUsers(userIds))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users"
request.uri.rawQueryString === Some(V2SpecQueryHelper.buildIdsParam(userIds))
}
.respondWithRated("/twitter/rest/v2/users/userlookup/users.json")
.await
result.rate_limit === rateLimit
result.data === UsersResponseFixture.fixture
}
"lookup users with expansions" in new TwitterUserLookupClientSpecContext {
val userIds = Seq("123","456")
val expansions: Seq[UserExpansions] = V2SpecQueryHelper.allUserExpansions
when(lookupUsers(
ids = userIds,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserExpansions(expansions),
V2SpecQueryHelper.buildIdsParam(userIds)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup users with user fields" in new TwitterUserLookupClientSpecContext {
val userIds = Seq("123","456")
val userFields: Seq[UserFields] = V2SpecQueryHelper.allUserFields
when(lookupUsers(
ids = userIds,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildIdsParam(userIds),
V2SpecQueryHelper.buildUserFieldsParam(userFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup users with tweet fields" in new TwitterUserLookupClientSpecContext {
val userIds = Seq("123","456")
val tweetFields: Seq[TweetFields] = V2SpecQueryHelper.allTweetFields
when(lookupUsers(
ids = userIds,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildIdsParam(userIds),
V2SpecQueryHelper.buildTweetFieldsParam(tweetFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup user" in new TwitterUserLookupClientSpecContext {
val userId = "123"
val result: RatedData[UserResponse] = when(lookupUser(userId))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId"
request.uri.rawQueryString === None
}
.respondWithRated("/twitter/rest/v2/users/userlookup/user.json")
.await
result.rate_limit === rateLimit
result.data === UserResponseFixture.fixture
}
"lookup user with expansions" in new TwitterUserLookupClientSpecContext {
val userId = "123"
val expansions: Seq[UserExpansions] = V2SpecQueryHelper.allUserExpansions
when(lookupUser(
id = userId,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserExpansions(expansions)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup user with user fields" in new TwitterUserLookupClientSpecContext {
val userId = "123"
val userFields: Seq[UserFields] = V2SpecQueryHelper.allUserFields
when(lookupUser(
id = userId,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserFieldsParam(userFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup user with tweet fields" in new TwitterUserLookupClientSpecContext {
val userId = "123"
val tweetFields: Seq[TweetFields] = V2SpecQueryHelper.allTweetFields
when(lookupUser(
id = userId,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetFieldsParam(tweetFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup users by usernames" in new TwitterUserLookupClientSpecContext {
val usernames = Seq("user1","user2")
val result: RatedData[UsersResponse] = when(lookupUsersByUsernames(usernames))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users/by"
request.uri.rawQueryString === Some(V2SpecQueryHelper.buildUsernamesParam(usernames))
}
.respondWithRated("/twitter/rest/v2/users/userlookup/users.json")
.await
result.rate_limit === rateLimit
result.data === UsersResponseFixture.fixture
}
"lookup users by usernames with expansions" in new TwitterUserLookupClientSpecContext {
val usernames = Seq("user1","user2")
val expansions: Seq[UserExpansions] = V2SpecQueryHelper.allUserExpansions
when(lookupUsersByUsernames(
usernames = usernames,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users/by"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserExpansions(expansions),
V2SpecQueryHelper.buildUsernamesParam(usernames)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup users by usernames with user fields" in new TwitterUserLookupClientSpecContext {
val usernames = Seq("user1","user2")
val userFields: Seq[UserFields] = V2SpecQueryHelper.allUserFields
when(lookupUsersByUsernames(
usernames = usernames,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users/by"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserFieldsParam(userFields),
V2SpecQueryHelper.buildUsernamesParam(usernames)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup users by usernames with tweet fields" in new TwitterUserLookupClientSpecContext {
val usernames = Seq("user1","user2")
val tweetFields: Seq[TweetFields] = V2SpecQueryHelper.allTweetFields
when(lookupUsersByUsernames(
usernames = usernames,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === "https://api.twitter.com/2/users/by"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetFieldsParam(tweetFields),
V2SpecQueryHelper.buildUsernamesParam(usernames)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup user by username" in new TwitterUserLookupClientSpecContext {
val username = "user1"
val result: RatedData[UserResponse] = when(lookupUserByUsername(username))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/by/username/$username"
request.uri.rawQueryString === None
}
.respondWithRated("/twitter/rest/v2/users/userlookup/user.json")
.await
result.rate_limit === rateLimit
result.data === UserResponseFixture.fixture
}
"lookup user with expansions" in new TwitterUserLookupClientSpecContext {
val username = "user1"
val expansions: Seq[UserExpansions] = V2SpecQueryHelper.allUserExpansions
when(lookupUserByUsername(
username = username,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/by/username/$username"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserExpansions(expansions)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup user by username with user fields" in new TwitterUserLookupClientSpecContext {
val username = "user1"
val userFields: Seq[UserFields] = V2SpecQueryHelper.allUserFields
when(lookupUserByUsername(
username = username,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/by/username/$username"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserFieldsParam(userFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup user by username with tweet fields" in new TwitterUserLookupClientSpecContext {
val username = "user1"
val tweetFields: Seq[TweetFields] = V2SpecQueryHelper.allTweetFields
when(lookupUserByUsername(
username = username,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/by/username/$username"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetFieldsParam(tweetFields)
).mkString("&"))
}
.respondWithOk
.await
}
}
}
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/Withheld.scala | package com.danielasfregola.twitter4s.entities.v2
import com.danielasfregola.twitter4s.entities.v2.enums.WithheldScope.WithheldScope
final case class Withheld(copyright: Boolean,
country_codes: Seq[String],
scope: WithheldScope)
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/tweets/TwitterTimelinesClient.scala | package com.danielasfregola.twitter4s.http.clients.rest.v2.tweets
import com.danielasfregola.twitter4s.entities.RatedData
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.TweetExpansions.Expansions
import com.danielasfregola.twitter4s.entities.v2.enums.fields.MediaFields.MediaFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.PlaceFields.PlaceFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.PollFields.PollFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.TweetFields.TweetFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.UserFields.UserFields
import com.danielasfregola.twitter4s.entities.v2.enums.rest.TimelineExclude.TimelineExclude
import com.danielasfregola.twitter4s.entities.v2.responses.TweetsResponse
import com.danielasfregola.twitter4s.http.clients.rest.RestClient
import com.danielasfregola.twitter4s.http.clients.rest.v2.tweets.paramaters.{TimelineMentionsParameters, TimelineTweetsParameters}
import com.danielasfregola.twitter4s.util.Configurations.{apiTwitterUrl, twitterVersionV2}
import java.time.Instant
import scala.concurrent.Future
/** Implements the available requests for the v2 `timelines` resource. */
trait TwitterTimelinesClient {
protected val restClient: RestClient
private val baseTimelinesUrl = s"$apiTwitterUrl/$twitterVersionV2/users"
/** Returns Tweets composed by a single user, specified by the requested user ID.
* By default, the most recent ten Tweets are returned per request. Using pagination, the most recent 3,200 Tweets
* can be retrieved.
*
* The Tweets returned by this endpoint count towards the Project-level
* <a href="https://developer.twitter.com/en/docs/projects/overview#tweet-cap" target="_blank">
* Tweet cap</a>.
*
* For more information see
* <a href="https://developer.twitter.com/en/docs/twitter-api/tweets/timelines/api-reference/get-users-id-tweets" target="_blank">
* https://developer.twitter.com/en/docs/twitter-api/tweets/timelines/api-reference/get-users-id-tweets</a>
*
* @param userId : Unique identifier of the Twitter account (user ID) for whom to return results.
* User ID can be referenced using the
* <a href="https://developer.twitter.com/en/docs/twitter-api/users/lookup/introduction" target="_blank">
* user/lookup</a> endpoint.
* More information on Twitter IDs is
* <a href="https://developer.twitter.com/en/docs/twitter-ids" target="_blank">
* here</a>.
* @param startTime : Optional, by default is `None`
* The oldest or earliest UTC timestamp from which the Tweets will be provided. Only the 3200 most
* recent Tweets are available. Timestamp is in second granularity and is inclusive
* (for example, 12:00:01 includes the first second of the minute).
* Minimum allowable time is 2010-11-06T00:00:00Z
*
* Please note that this parameter does not support a millisecond value.
* @param endTime : Optional, by default is `None`
* The newest or most recent UTC timestamp from which the Tweets will be provided. Only the 3200
* most recent Tweets are available. Timestamp is in second granularity and is inclusive
* (for example, 12:00:01 includes the first second of the minute).
* Minimum allowable time is 2010-11-06T00:00:01Z
*
* Please note that this parameter does not support a millisecond value.
* @param maxResults : Optional, by default is `None`
* Specifies the number of Tweets to try and retrieve, up to a maximum of 100 per distinct request.
* By default, 10 results are returned if this parameter is not supplied. The minimum permitted
* value is 5. It is possible to receive less than the `max_results` per request throughout the
* pagination process.
* @param paginationToken : Optional, by default is `None`
* This parameter is used to move forwards or backwards through 'pages' of results, based on
* the value of the `next_token` or `previous_token` in the response. The value used with the
* parameter is pulled directly from the response provided by the API, and should not be modified.
* @param sinceId : Optional, by default is `None`
* Returns results with a Tweet ID greater than (that is, more recent than) the specified
* 'since' Tweet ID. Only the 3200 most recent Tweets are available. The result will exclude
* the `since_id`. If the limit of Tweets has occurred since the `since_id`,
* the `since_id` will be forced to the oldest ID available.
* @param untilId : Optional, by default is `None`
* Returns results with a Tweet ID less than (that is, older than) the specified 'until'
* Tweet ID. Only the 3200 most recent Tweets are available. The result will exclude the
* `until_id`. If the limit of Tweets has occurred since the `until_id`, the `until_id` will be
* forced to the most recent ID available.
* @param exclude : Optional, by default is `Seq.empty`
* List of the types of Tweets to exclude from the response. When `exclude=retweets` is used,
* the maximum historical Tweets returned is still 3200. When the `exclude=replies` parameter
* is used for any value, only the most recent 800 Tweets are available.
* @param expansions : Optional, by default is `Seq.empty`
* Expansions enable you to request additional data objects that relate to the originally
* returned Tweets. The ID that represents the expanded data object will be included directly
* in the Tweet data object, but the expanded object metadata will be returned within the includes
* response object, and will also include the ID so that you can match this data object to the
* original Tweet object.
* @param tweetFields : Optional, by default is `Seq.empty`
* This <a href="https://developer.twitter.com/en/docs/twitter-api/fields">fields</a> parameter
* enables you to select which specific
* <a href="https://developer.twitter.com/en/docs/twitter-api/data-dictionary/object-model/tweet">Tweet fields</a>
* will deliver in each returned Tweet object. You can also include the `referenced_tweets.id` expansion
* to return the specified fields for both the original Tweet and any included referenced Tweets.
* The requested Tweet fields will display in both the original Tweet data object, as well as in
* the referenced Tweet expanded data object that will be located in the includes data object.
* @param userFields : Optional, by default is `Seq.empty`
* This <a href="https://developer.twitter.com/en/docs/twitter-api/fields">fields</a> parameter
* enables you to select which specific
* <a href="https://developer.twitter.com/en/docs/twitter-api/data-dictionary/object-model/user">user fields</a>
* will deliver in each returned Tweet. While the user ID will be located in the original Tweet object,
* you will find this ID and all additional user fields in the includes data object.
*
* @return : The representation of the search results.
*/
def lookupTimeline(userId: String,
startTime: Option[Instant] = None,
endTime: Option[Instant] = None,
maxResults: Option[Int] = None,
paginationToken: Option[String] = None,
sinceId: Option[String] = None,
untilId: Option[String] = None,
exclude: Seq[TimelineExclude] = Seq.empty[TimelineExclude],
expansions: Seq[Expansions] = Seq.empty[Expansions],
tweetFields: Seq[TweetFields] = Seq.empty[TweetFields],
userFields: Seq[UserFields] = Seq.empty[UserFields]): Future[RatedData[TweetsResponse]] = {
val parameters = TimelineTweetsParameters(
start_time = startTime,
end_time = endTime,
max_results = maxResults,
pagination_token = paginationToken,
since_id = sinceId,
until_id = untilId,
exclude = exclude,
expansions = expansions,
`media.fields` = Seq.empty[MediaFields], // TODO: Pending addition of media model
`place.fields` = Seq.empty[PlaceFields], // TODO: Pending addition of place model
`poll.fields` = Seq.empty[PollFields], // TODO: Pending addition of poll fields
`tweet.fields` = tweetFields,
`user.fields` = userFields
)
genericGetTweets(
userId,
parameters
)
}
/** Returns Tweets mentioning a single user specified by the requested user ID. By default, the most recent ten
* Tweets are returned per request. Using pagination, up to the most recent 800 Tweets can be retrieved.
*
* The Tweets returned by this endpoint count towards the Project-level
* <a href="https://developer.twitter.com/en/docs/projects/overview#tweet-cap" target="_blank">
* Tweet cap</a>.
*
* For more information see
* <a href="https://developer.twitter.com/en/docs/twitter-api/tweets/timelines/api-reference/get-users-id-tweets" target="_blank">
* https://developer.twitter.com/en/docs/twitter-api/tweets/timelines/api-reference/get-users-id-tweets</a>
*
* @param userId : Unique identifier of the Twitter account (user ID) for whom to return results.
* User ID can be referenced using the
* <a href="https://developer.twitter.com/en/docs/twitter-api/users/lookup/introduction" target="_blank">
* user/lookup</a> endpoint.
* More information on Twitter IDs is
* <a href="https://developer.twitter.com/en/docs/twitter-ids" target="_blank">
* here</a>.
* @param startTime : Optional, by default is `None`
* The oldest or earliest UTC timestamp from which the Tweets will be provided. Only the 3200 most
* recent Tweets are available. Timestamp is in second granularity and is inclusive
* (for example, 12:00:01 includes the first second of the minute).
* Minimum allowable time is 2010-11-06T00:00:00Z
*
* Please note that this parameter does not support a millisecond value.
* @param endTime : Optional, by default is `None`
* The newest or most recent UTC timestamp from which the Tweets will be provided. Only the 3200
* most recent Tweets are available. Timestamp is in second granularity and is inclusive
* (for example, 12:00:01 includes the first second of the minute).
* Minimum allowable time is 2010-11-06T00:00:01Z
*
* Please note that this parameter does not support a millisecond value.
* @param maxResults : Optional, by default is `None`
* Specifies the number of Tweets to try and retrieve, up to a maximum of 100 per distinct request.
* By default, 10 results are returned if this parameter is not supplied. The minimum permitted
* value is 5. It is possible to receive less than the `max_results` per request throughout the
* pagination process.
* @param paginationToken : Optional, by default is `None`
* This parameter is used to move forwards or backwards through 'pages' of results, based on
* the value of the `next_token` or `previous_token` in the response. The value used with the
* parameter is pulled directly from the response provided by the API, and should not be modified.
* @param sinceId : Optional, by default is `None`
* Returns results with a Tweet ID greater than (that is, more recent than) the specified
* 'since' Tweet ID. Only the 3200 most recent Tweets are available. The result will exclude
* the `since_id`. If the limit of Tweets has occurred since the `since_id`,
* the `since_id` will be forced to the oldest ID available.
* @param untilId : Optional, by default is `None`
* Returns results with a Tweet ID less than (that is, older than) the specified 'until'
* Tweet ID. Only the 3200 most recent Tweets are available. The result will exclude the
* `until_id`. If the limit of Tweets has occurred since the `until_id`, the `until_id` will be
* forced to the most recent ID available.
* @param expansions : Optional, by default is `Seq.empty`
* Expansions enable you to request additional data objects that relate to the originally
* returned Tweets. The ID that represents the expanded data object will be included directly
* in the Tweet data object, but the expanded object metadata will be returned within the includes
* response object, and will also include the ID so that you can match this data object to the
* original Tweet object.
* @param tweetFields : Optional, by default is `Seq.empty`
* This <a href="https://developer.twitter.com/en/docs/twitter-api/fields">fields</a> parameter
* enables you to select which specific
* <a href="https://developer.twitter.com/en/docs/twitter-api/data-dictionary/object-model/tweet">Tweet fields</a>
* will deliver in each returned Tweet object. You can also include the `referenced_tweets.id` expansion
* to return the specified fields for both the original Tweet and any included referenced Tweets.
* The requested Tweet fields will display in both the original Tweet data object, as well as in
* the referenced Tweet expanded data object that will be located in the includes data object.
* @param userFields : Optional, by default is `Seq.empty`
* This <a href="https://developer.twitter.com/en/docs/twitter-api/fields">fields</a> parameter
* enables you to select which specific
* <a href="https://developer.twitter.com/en/docs/twitter-api/data-dictionary/object-model/user">user fields</a>
* will deliver in each returned Tweet. While the user ID will be located in the original Tweet object,
* you will find this ID and all additional user fields in the includes data object.
*
* @return : The representation of the search results.
*/
def lookupMentions(userId: String,
startTime: Option[Instant] = None,
endTime: Option[Instant] = None,
maxResults: Option[Int] = None,
paginationToken: Option[String] = None,
sinceId: Option[String] = None,
untilId: Option[String] = None,
expansions: Seq[Expansions] = Seq.empty[Expansions],
tweetFields: Seq[TweetFields] = Seq.empty[TweetFields],
userFields: Seq[UserFields] = Seq.empty[UserFields]): Future[RatedData[TweetsResponse]] = {
val parameters = TimelineMentionsParameters(
start_time = startTime,
end_time = endTime,
max_results = maxResults,
pagination_token = paginationToken,
since_id = sinceId,
until_id = untilId,
expansions = expansions,
`media.fields` = Seq.empty[MediaFields], // TODO: Pending addition of media model
`place.fields` = Seq.empty[PlaceFields], // TODO: Pending addition of place model
`poll.fields` = Seq.empty[PollFields], // TODO: Pending addition of poll fields
`tweet.fields` = tweetFields,
`user.fields` = userFields
)
genericGetMentions(
userId,
parameters
)
}
private def genericGetTweets(userId: String, parameters: TimelineTweetsParameters): Future[RatedData[TweetsResponse]] = {
import restClient._
Get(
s"$baseTimelinesUrl/$userId/tweets",
parameters
).respondAsRated[TweetsResponse]
}
private def genericGetMentions(userId: String, parameters: TimelineMentionsParameters): Future[RatedData[TweetsResponse]] = {
import restClient._
Get(
s"$baseTimelinesUrl/$userId/mentions",
parameters
).respondAsRated[TweetsResponse]
}
}
|
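A hedged usage sketch (not from the original sources): any concrete class mixing in TwitterTimelinesClient, such as TwitterRestV2Client later in this dump, exposes these lookups. The user id is a placeholder and credentials are read from the twitter4s configuration.

import scala.concurrent.Await
import scala.concurrent.duration._
import com.danielasfregola.twitter4s.TwitterRestV2Client
import com.danielasfregola.twitter4s.entities.v2.enums.rest.TimelineExclude

object TimelineExample extends App {
  val client = TwitterRestV2Client()
  // Fetch the five most recent original tweets (retweets excluded) for a user.
  val rated = Await.result(
    client.lookupTimeline(userId = "2244994945", maxResults = Some(5), exclude = Seq(TimelineExclude.Retweets)),
    30.seconds)
  rated.data.data.foreach(tweet => println(tweet.text))
}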
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/responses/UserResponse.scala |
package com.danielasfregola.twitter4s.entities.v2.responses
import com.danielasfregola.twitter4s.entities.v2.{Error, UserIncludes, User}
final case class UserResponse(data: Option[User],
includes: Option[UserIncludes],
errors: Seq[Error])
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/UserEntities.scala |
package com.danielasfregola.twitter4s.entities.v2
final case class UserEntities(url: Option[UserURLContainer],
description: Option[UserEntitiesDescription])
final case class UserURLContainer(urls: Seq[UserEntitiesURL])
final case class UserEntitiesURL(start: Int,
end: Int,
url: String,
expanded_url: String,
display_url: String)
final case class UserEntitiesDescription(urls: Seq[UserEntitiesURL],
hashtags: Seq[UserEntitiesHashtag],
mentions: Seq[UserEntitiesMention],
cashtags: Seq[UserEntitiesCashtag])
final case class UserEntitiesHashtag(start: Int,
end: Int,
tag: String)
final case class UserEntitiesMention(start: Int,
end: Int,
username: String)
final case class UserEntitiesCashtag(start: Int,
end: Int,
tag: String)
|
harrywada/twitter4s | src/test/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/tweets/fixtures/timelines/TweetsResponseFixture.scala |
package com.danielasfregola.twitter4s.http.clients.rest.v2.tweets.fixtures.timelines
import com.danielasfregola.twitter4s.entities.v2._
import com.danielasfregola.twitter4s.entities.v2.enums.{CoordinatesType, ReferencedTweetType, TweetReplySetting}
import com.danielasfregola.twitter4s.entities.v2.responses.TweetsResponse
import java.time.Instant
object TweetsResponseFixture {
val fixture: TweetsResponse = TweetsResponse(
data = Seq(Tweet(
id = "2310484964373377688",
text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.",
attachments = Some(TweetAttachments(
media_keys = Seq(
"13_9995568729608410852",
"14_0381608460867506993"
),
poll_ids = Seq(
"0419198663175162881"
)
)),
author_id = Some("6338724728067829004"),
context_annotations = Some(Seq(
TweetContextAnnotation(
domain = TweetDomain(
id = "10",
name = "Person",
description = Some("Vestibulum pellentesque sed justo ac lacinia")
),
entity = TweetEntity(
id = "5233852574321016658",
name = "<NAME>",
description = Some("Phasellus Eu")
)
),
TweetContextAnnotation(
domain = TweetDomain(
id = "54",
name = "Musician",
description = Some("Nullam imperdiet lectus metus")
),
entity = TweetEntity(
id = "6836362243680041612",
name = "<NAME>",
description = Some("Phasell<NAME>u")
)
)
)),
conversation_id = Some("0471114572504780656"),
created_at = Some(Instant.parse("2020-05-15T16:03:42.000Z")),
entities = Some(TweetEntities(
annotations = Seq(
TweetEntitiesAnnotation(
start = 144,
end = 150,
probability = 0.626f,
`type` = "Product",
normalized_text = "Twitter"
)
),
cashtags = Seq(
TweetEntitiesCashtag(
start = 41,
end = 44,
tag = "GE"
)
),
urls = Seq(
TweetEntitiesURL(
start = 257,
end = 280,
url = "https://t.co/sodales",
expanded_url = "https://www.google.com/sodales",
display_url = "example.google.com/sodales",
unwound_url = Some("https://www.google.com/sodales")
),
),
mentions = Seq(
TweetEntitiesMention(
start = 105,
end = 121,
username = Some("SuspendisseAtNunc"),
id = Some("2894469526322928935")
),
TweetEntitiesMention(
start = 125,
end = 138,
username = Some("SuspendisseAtNuncPosuere"),
id = Some("6279687081065223918")
)
),
hashtags = Seq(
TweetEntitiesHashtag(
start = 47,
end = 60,
tag = "SuspendisseAtNunc"
),
TweetEntitiesHashtag(
start = 171,
end = 194,
tag = "SuspendisseNunc"
)
),
)),
geo = Some(TweetGeo(
coordinates = Some(TweetCoordinates(
`type` = CoordinatesType.Point,
coordinates = (40.74118764, -73.9998279)
)),
place_id = Some("0fc2bbe1f995b733")
)),
in_reply_to_user_id = Some("1600750904601052113"),
lang = Some("en"),
non_public_metrics = Some(TweetNonPublicMetrics(
user_profile_clicks = 0,
impression_count = 29,
url_link_clicks = Some(12)
)),
organic_metrics = Some(TweetOrganicMetrics(
retweet_count = 0,
url_link_clicks = Some(12),
reply_count = 0,
like_count = 1,
user_profile_clicks = 0,
impression_count = 29
)),
possibly_sensitive = Some(true),
promoted_metrics = Some(TweetPromotedMetrics(
impression_count = 29,
url_link_clicks = Some(12),
user_profile_clicks = 0,
retweet_count = 0,
reply_count = 0,
like_count = 1
)),
public_metrics = Some(TweetPublicMetrics(
retweet_count = 0,
reply_count = 0,
like_count = 1,
quote_count = 0
)),
referenced_tweets = Some(Seq(
TweetReferencedTweet(
`type` = ReferencedTweetType.Retweeted,
id = "4653693971459419590"
)
)),
reply_settings = Some(TweetReplySetting.Everyone),
source = Some("Twitter for iPhone"),
withheld = None
)),
includes = Some(TweetIncludes(
tweets = Seq(Tweet(
id = "6304480225832455363",
text = "Donec feugiat elit tellus, a ultrices elit sodales facilisis.",
attachments = None,
author_id = None,
context_annotations = None,
conversation_id = None,
created_at = None,
entities = None,
geo = None,
in_reply_to_user_id = None,
lang = None,
non_public_metrics = None,
organic_metrics = None,
possibly_sensitive = None,
promoted_metrics = None,
public_metrics = None,
referenced_tweets = None,
reply_settings = None,
source = None,
withheld = None
)),
users = Seq(
User(
id = "3955854555026519618",
name = "AliquamOrciEros",
username = "aliquamorcieros",
created_at = None,
`protected` = None,
withheld = None,
location = None,
url = None,
description = None,
verified = None,
entities = None,
profile_image_url = None,
public_metrics = None,
pinned_tweet_id = None
),
User(
id = "6747736441958634428",
name = "<NAME>",
username = "suspendisseatnunc",
created_at = None,
`protected` = None,
withheld = None,
location = None,
url = None,
description = None,
verified = None,
entities = None,
profile_image_url = None,
public_metrics = None,
pinned_tweet_id = None
)
)
)),
meta = Some(Meta(
oldest_id = Some("1356759580211109999"),
newest_id = Some("1410697282811569999"),
result_count = 7,
next_token = Some("<PASSWORD>"),
previous_token = None
)),
errors = Seq.empty[Error]
)
}
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/responses/TweetResponse.scala | package com.danielasfregola.twitter4s.entities.v2.responses
import com.danielasfregola.twitter4s.entities.v2.{Error, TweetIncludes, Tweet}
final case class TweetResponse(data: Option[Tweet],
includes: Option[TweetIncludes],
errors: Seq[Error])
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/User.scala | package com.danielasfregola.twitter4s.entities.v2
import java.time.Instant
final case class User(id: String,
name: String,
username: String,
created_at: Option[Instant],
`protected`: Option[Boolean],
withheld: Option[Withheld],
location: Option[String],
url: Option[String],
description: Option[String],
verified: Option[Boolean],
entities: Option[UserEntities],
profile_image_url: Option[String],
public_metrics: Option[UserPublicMetrics],
pinned_tweet_id: Option[String])
final case class UserPublicMetrics(followers_count: Int,
following_count: Int,
tweet_count: Int,
listed_count: Int)
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/users/parameters/UsersByUsernamesParameters.scala | package com.danielasfregola.twitter4s.http.clients.rest.v2.users.parameters
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.UserExpansions.Expansions
import com.danielasfregola.twitter4s.entities.v2.enums.fields.TweetFields.TweetFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.UserFields.UserFields
import com.danielasfregola.twitter4s.http.marshalling.Parameters
private[twitter4s] final case class UsersByUsernamesParameters(usernames: Seq[String],
expansions: Seq[Expansions],
`tweet.fields`: Seq[TweetFields] = Seq.empty[TweetFields],
`user.fields`: Seq[UserFields] = Seq.empty[UserFields]) extends Parameters
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/enums/expansions/TweetExpansions.scala |
package com.danielasfregola.twitter4s.entities.v2.enums.expansions
object TweetExpansions extends Enumeration {
type Expansions = Value
// val `Attachments.PollIds` = Value("attachments.poll_ids") // TODO: Pending addition of poll model
// val `Attachments.MediaKeys` = Value("attachments.media_keys") // TODO: Pending addition of media model
val AuthorId = Value("author_id")
val `Entities.Mentions.Username` = Value("entities.mentions.username")
// val `Geo.PlaceId` = Value("geo.place_id") // TODO: Pending addition of place model
val InReplyToUser = Value("in_reply_to_user_id")
val `ReferencedTweets.Id` = Value("referenced_tweets.id")
val `ReferencedTweets.Id.AuthorId` = Value("referenced_tweets.id.author_id")
}
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/UserIncludes.scala |
package com.danielasfregola.twitter4s.entities.v2
final case class UserIncludes(tweets: Seq[Tweet],
users: Seq[User])
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/enums/expansions/UserExpansions.scala |
package com.danielasfregola.twitter4s.entities.v2.enums.expansions
object UserExpansions extends Enumeration {
type Expansions = Value
val PinnedTweetId = Value("pinned_tweet_id")
}
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/v2/TweetIncludes.scala | package com.danielasfregola.twitter4s.entities.v2
final case class TweetIncludes(tweets: Seq[Tweet],
users: Seq[User]
// places: Seq[Place], // TODO: Pending addition of places model
// media: Seq[Media], // TODO: Pending addition of media model
// polls: Seq[Polls] // TODO pending addition of polls model
)
|
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/TwitterRestV2Client.scala | package com.danielasfregola.twitter4s
import akka.actor.ActorSystem
import com.danielasfregola.twitter4s.entities.{AccessToken, ConsumerToken}
import com.danielasfregola.twitter4s.http.clients.rest.RestClient
import com.danielasfregola.twitter4s.http.clients.rest.v2.tweets.{TwitterTimelinesClient, TwitterTweetLookupClient}
import com.danielasfregola.twitter4s.http.clients.rest.v2.users.TwitterUserLookupClient
import com.danielasfregola.twitter4s.util.Configurations._
import com.danielasfregola.twitter4s.util.SystemShutdown
/** Represents the functionalities offered by the V2 Twitter REST API
*/
class TwitterRestV2Client(val consumerToken: ConsumerToken, val accessToken: AccessToken)(implicit _system: ActorSystem =
ActorSystem("twitter4s-rest-v2"))
extends V2RestClients
with SystemShutdown {
protected val system = _system
protected val restClient = new RestClient(consumerToken, accessToken)
}
trait V2RestClients
extends TwitterTimelinesClient
with TwitterTweetLookupClient
with TwitterUserLookupClient
object TwitterRestV2Client {
def apply(): TwitterRestV2Client = {
val consumerToken = ConsumerToken(key = consumerTokenKey, secret = consumerTokenSecret)
val accessToken = AccessToken(key = accessTokenKey, secret = accessTokenSecret)
apply(consumerToken, accessToken)
}
def apply(consumerToken: ConsumerToken, accessToken: AccessToken): TwitterRestV2Client =
new TwitterRestV2Client(consumerToken, accessToken)
def withActorSystem(system: ActorSystem): TwitterRestV2Client = {
val consumerToken = ConsumerToken(key = consumerTokenKey, secret = consumerTokenSecret)
val accessToken = AccessToken(key = accessTokenKey, secret = accessTokenSecret)
withActorSystem(consumerToken, accessToken)(system)
}
def withActorSystem(consumerToken: ConsumerToken, accessToken: AccessToken)(system: ActorSystem): TwitterRestV2Client =
new TwitterRestV2Client(consumerToken, accessToken)(system)
}
|
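A sketch of constructing the V2 client above with explicit credentials instead of configuration-provided ones (token strings are placeholders):

import com.danielasfregola.twitter4s.TwitterRestV2Client
import com.danielasfregola.twitter4s.entities.{AccessToken, ConsumerToken}

object V2ClientFromTokens {
  val consumer = ConsumerToken(key = "my-consumer-key", secret = "my-consumer-secret")
  val access = AccessToken(key = "my-access-key", secret = "my-access-secret")
  val v2Client: TwitterRestV2Client = TwitterRestV2Client(consumer, access)
}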
harrywada/twitter4s | src/main/scala/com/danielasfregola/twitter4s/http/serializers/EnumFormats.scala |
package com.danielasfregola.twitter4s.http.serializers
import com.danielasfregola.twitter4s.entities.enums._
import com.danielasfregola.twitter4s.entities.v2.enums._
import org.json4s.Formats
import org.json4s.ext.EnumNameSerializer
private[twitter4s] object EnumFormats extends FormatsComposer {
override def compose(f: Formats): Formats =
f +
new EnumNameSerializer(Alignment) +
new EnumNameSerializer(ContributorType) +
new EnumNameSerializer(DisconnectionCode) +
new EnumNameSerializer(SimpleEventCode) +
new EnumNameSerializer(TweetEventCode) +
new EnumNameSerializer(TwitterListEventCode) +
new EnumNameSerializer(Granularity) +
new EnumNameSerializer(Hour) +
new EnumNameSerializer(Language) +
new EnumNameSerializer(Measure) +
new EnumNameSerializer(Mode) +
new EnumNameSerializer(Resource) +
new EnumNameSerializer(ResultType) +
new EnumNameSerializer(TimeZone) +
new EnumNameSerializer(WidgetType) +
new EnumNameSerializer(WithFilter) +
new EnumNameSerializer(CoordinatesType) +
new EnumNameSerializer(ReferencedTweetType) +
new EnumNameSerializer(TweetReplySetting) +
new EnumNameSerializer(WithheldScope)
}
|
harrywada/twitter4s | src/test/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/tweets/TwitterTimelinesClientSpec.scala | package com.danielasfregola.twitter4s.http.clients.rest.v2.tweets
import akka.http.scaladsl.model.HttpMethods
import com.danielasfregola.twitter4s.entities.RatedData
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.TweetExpansions.{Expansions => TweetExpansions}
import com.danielasfregola.twitter4s.entities.v2.enums.fields.TweetFields.TweetFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.UserFields.UserFields
import com.danielasfregola.twitter4s.entities.v2.enums.rest.TimelineExclude
import com.danielasfregola.twitter4s.entities.v2.enums.rest.TimelineExclude.TimelineExclude
import com.danielasfregola.twitter4s.entities.v2.responses.TweetsResponse
import com.danielasfregola.twitter4s.helpers.ClientSpec
import com.danielasfregola.twitter4s.http.clients.rest.v2.tweets.fixtures.timelines.TweetsResponseFixture
import com.danielasfregola.twitter4s.http.clients.rest.v2.utils.V2SpecQueryHelper
class TwitterTimelinesClientSpec extends ClientSpec {
class TwitterTimelinesClientSpecContext extends RestClientSpecContext with TwitterTimelinesClient
"Twitter Tweet Lookup Client" should {
"lookup timelines" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val result: RatedData[TweetsResponse] = when(lookupTimeline(userId))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/tweets"
request.uri.rawQueryString === None
}
.respondWithRated("/twitter/rest/v2/tweets/timelines/tweets.json")
.await
result.rate_limit === rateLimit
result.data === TweetsResponseFixture.fixture
}
"lookup timelines with expansions" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val expansions: Seq[TweetExpansions] = V2SpecQueryHelper.allTweetExpansions
when(lookupTimeline(
userId = userId,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/tweets"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetExpansions(expansions)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup timelines with exclude" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val exclusions: Seq[TimelineExclude] = Seq(
TimelineExclude.Replies,
TimelineExclude.Retweets
)
when(lookupTimeline(
userId = userId,
exclude = exclusions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/tweets"
request.uri.rawQueryString === Some("exclude=replies%2Cretweets")
}
.respondWithOk
.await
}
"lookup timelines with tweet fields" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val tweetFields: Seq[TweetFields] = V2SpecQueryHelper.allTweetFields
when(lookupTimeline(
userId = userId,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/tweets"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetFieldsParam(tweetFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup timelines with user fields" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val userFields: Seq[UserFields] = V2SpecQueryHelper.allUserFields
when(lookupTimeline(
userId = userId,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/tweets"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserFieldsParam(userFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup mentions" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val result: RatedData[TweetsResponse] = when(lookupMentions(userId))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/mentions"
request.uri.rawQueryString === None
}
.respondWithRated("/twitter/rest/v2/tweets/timelines/tweets.json")
.await
result.rate_limit === rateLimit
result.data === TweetsResponseFixture.fixture
}
"lookup mentions with expansions" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val expansions: Seq[TweetExpansions] = V2SpecQueryHelper.allTweetExpansions
when(lookupMentions(
userId = userId,
expansions = expansions
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/mentions"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetExpansions(expansions)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup mentions with tweet fields" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val tweetFields: Seq[TweetFields] = V2SpecQueryHelper.allTweetFields
when(lookupMentions(
userId = userId,
tweetFields = tweetFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/mentions"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildTweetFieldsParam(tweetFields)
).mkString("&"))
}
.respondWithOk
.await
}
"lookup mentions with user fields" in new TwitterTimelinesClientSpecContext {
val userId = "123"
val userFields: Seq[UserFields] = V2SpecQueryHelper.allUserFields
when(lookupMentions(
userId = userId,
userFields = userFields
))
.expectRequest { request =>
request.method === HttpMethods.GET
request.uri.endpoint === s"https://api.twitter.com/2/users/$userId/mentions"
request.uri.rawQueryString === Some(Seq(
V2SpecQueryHelper.buildUserFieldsParam(userFields)
).mkString("&"))
}
.respondWithOk
.await
}
}
}
|
sam-ma/hawtdispatch | hawtdispatch-scala/src/test/scala/org/fusesource/hawtdispatch/SocketTest.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch
import org.scalatest._
import junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.matchers.ShouldMatchers
import java.nio._
import java.nio.channels._
import java.net._
import java.io.IOException
import org.fusesource.hawtdispatch._
import java.util.concurrent.atomic.AtomicInteger;
/**
* <p>
* </p>
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
@RunWith(classOf[JUnitRunner])
class SocketTest extends FunSuite with ShouldMatchers {
test("Socket Disconnect Event") {
val connections = new AtomicInteger(0);
class Server() {
val channel = ServerSocketChannel.open();
channel.socket().bind(new InetSocketAddress(0));
channel.configureBlocking(false);
val queue = createQueue("server")
val accept_source = createSource(channel, SelectionKey.OP_ACCEPT, queue);
accept_source.onEvent {
val socket = channel.accept();
try {
new Session(socket).start()
} catch {
case e: Exception => socket.close
}
}
def start() = {
accept_source.resume
this
}
def stop() = {
accept_source.cancel
}
accept_source.onCancel {
channel.close();
}
def port = channel.socket.getLocalPort
}
class Session(val channel: SocketChannel) {
channel.configureBlocking(false);
channel.socket.setSoLinger(true, 0)
val buffer = ByteBuffer.allocate(1024);
val queue = createQueue("session")
val read_source = createSource(channel, SelectionKey.OP_READ, queue);
var closed = false
read_source.setEventHandler(^{
try {
buffer.clear
if( !channel.isConnected ) {
close
} else if (channel.read(buffer) == -1) {
if( !closed ) {
close
}
}
} catch {
case e:IOException =>
if( !closed ) {
close
}
}
})
connections.incrementAndGet
def start() = read_source.resume
def close() = {
if( !closed ) {
closed = true;
read_source.cancel
}
}
read_source.onCancel {
connections.decrementAndGet
channel.close
}
}
def connections_should_equal(value:Int):Unit = {
for( i <- 0 until 20 ) {
if( connections.get==value ) {
return;
}
Thread.sleep(100);
}
connections.get should equal(value)
}
val server = new Server()
server.start
for( i <- 0 until 20 ) {
connections_should_equal(0)
val socket = new Socket("localhost", server.port);
socket.setSoLinger(true, 0)
connections_should_equal(1)
socket.close
connections_should_equal(0)
}
}
}
|
sam-ma/hawtdispatch | hawtdispatch-example/src/main/scala/org/fusesource/hawtdispatch/example/UdpEchoServer.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch.example
import java.io.{IOException}
import org.fusesource.hawtdispatch._
import java.nio.ByteBuffer
import java.nio.channels.{DatagramChannel, SelectionKey, ServerSocketChannel, SocketChannel}
import java.net.{SocketAddress, InetSocketAddress}
import java.util.LinkedList
/**
* A udp echo server example. Concurrently reads and writes
* packets by using 2 dispatch queues. Uses a custom dispatch
* source to handle coalescing interaction events between
* the sender and receiver queues.
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
object UdpEchoServer {
var port=4444;
def main(args:Array[String]):Unit = {
run
}
def run() = {
val server = new Server(port)
server.start
println("Press enter to shutdown.");
System.in.read
server.close
}
class Server(val port: Int) {
val channel = DatagramChannel.open();
channel.socket().bind(new InetSocketAddress(port));
channel.configureBlocking(false);
println("Listening on port: "+port);
object receiver {
// All mutable state in this object is modified while executing
// on this queue
val queue = createQueue("receive")
private var outbound = 0
private val outbound_max = 1024;
val read_events = createSource(channel, SelectionKey.OP_READ, queue);
read_events.onEvent {
try {
val buffer = ByteBuffer.allocate(1024);
var address: SocketAddress = channel.receive(buffer);
if( address!=null ) {
buffer.flip;
outbound += 1
sender.outbound_events.merge((buffer, address))
// stop receiving until the outbound is drained (aka: flow control)
if ( outbound_max < outbound ) {
read_events.suspend
}
}
} catch {
case e:IOException => close
}
}
// outbound_ack_events is used to let the sender know when the sends complete
val outbound_ack_events = createSource(EventAggregators.INTEGER_ADD, queue)
outbound_ack_events.onEvent {
outbound -= outbound_ack_events.getData()
if(read_events.isSuspended)
read_events.resume()
}
outbound_ack_events.resume();
}
object sender {
// All mutable state in this object is modified while executing
// on this queue
val queue = createQueue("send")
// pick up outbound events
private val outbound = new LinkedList[(ByteBuffer, SocketAddress)]
// outbound_events is an event bridge between the receiver and the sender event queues
// It will merge multiple events from the receiver queue into 1 event that gets delivered
// to the sender queue
val outbound_events = createSource(new ListEventAggregator[(ByteBuffer, SocketAddress)], queue)
outbound_events.onEvent {
for( value <- outbound_events.getData() ) {
outbound.add(value)
}
drainOutbound
}
outbound_events.resume();
// We need to drain the list of outbound packets when socket reports it
// can be written to.
val write_events = createSource(channel, SelectionKey.OP_WRITE, queue);
write_events.onEvent(drainOutbound)
def drainOutbound:Unit = try {
while(!outbound.isEmpty) {
val (buffer, address) = outbound.peek();
channel.send(buffer, address)
if(buffer.remaining()==0) {
            // Packet sent; notify the receiver in case it suspended reading.
receiver.outbound_ack_events.merge(1)
outbound.poll()
} else {
// Could not complete the write, we may need
// to resume the write source
if(write_events.isSuspended)
write_events.resume()
return
}
}
// Nothing left? then stop looking for write events
if(!write_events.isSuspended)
write_events.suspend
} catch {
case e:IOException => close
}
}
def start() = {
receiver.read_events.resume
}
def close() = {
receiver.read_events.cancel
sender.write_events.cancel
channel.close
}
}
}
|
sam-ma/hawtdispatch | hawtdispatch-scala/src/main/scala/org/fusesource/hawtdispatch/ListEventAggregator.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch
import collection.mutable.ListBuffer
/**
* <p>
* An EventAggregator that coalesces object data obtained via calls to
* {@link CustomDispatchSource#merge(Object)} into a ListBuffer.
* </p>
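 * <p>
 * Illustrative usage (a minimal sketch): events merged from any thread are
 * coalesced and delivered as one ListBuffer on the target queue.
 * </p>
 * <pre>
 * val queue  = createQueue("example")
 * val source = createSource(new ListEventAggregator[String], queue)
 * source.setEventHandler(^{ source.getData().foreach(println) })
 * source.resume()
 * source.merge("hello")   // may be called from any thread
 * </pre>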
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
class ListEventAggregator[T] extends OrderedEventAggregator[T, ListBuffer[T]] {
def mergeEvent(previous:ListBuffer[T], event:T) = {
if( previous == null ) {
ListBuffer(event)
} else {
previous += event
}
}
def mergeEvents(previous:ListBuffer[T], events:ListBuffer[T]):ListBuffer[T] = {
previous ++= events
}
}
|
sam-ma/hawtdispatch | hawtdispatch-example/src/main/scala/org/fusesource/hawtdispatch/example/CustomDispatchSourceScala.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch.example
import java.util.concurrent.Semaphore
import org.fusesource.hawtdispatch._
import org.fusesource.hawtdispatch.EventAggregators
/**
* <p>
* </p>
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
object CustomDispatchSourceScala {
def main(args: Array[String]): Unit = {
run
}
def run() = {
val done = new Semaphore(1 - (1000 * 1000))
val queue = createQueue()
val source = createSource(EventAggregators.INTEGER_ADD, queue)
source.setEventHandler(^{
val count = source.getData()
println("got: " + count)
done.release(count.intValue)
});
source.resume();
// Produce 1,000,000 concurrent merge events
for (i <- 0 until 1000) {
globalQueue {
for (j <- 0 until 1000) {
source.merge(1)
}
}
}
// Wait for all the event to arrive.
done.acquire()
}
}
|
sam-ma/hawtdispatch | hawtdispatch-example/src/main/scala/org/fusesource/hawtdispatch/example/EchoServer.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch.example
import java.io.{IOException}
import java.net.{InetSocketAddress}
import org.fusesource.hawtdispatch._
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
/**
* A simple echo server example.
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
object EchoServer {
var port=4444;
def main(args:Array[String]):Unit = {
run
}
def run() = {
val server = new Server(port).start
println("Press enter to shutdown.");
System.in.read
server.stop
}
class Server(val port: Int) {
val channel = ServerSocketChannel.open();
channel.socket().bind(new InetSocketAddress(port));
channel.configureBlocking(false);
val queue = createQueue("server")
val accept_source = createSource(channel, SelectionKey.OP_ACCEPT, queue);
accept_source.setEventHandler(^ {
val socket = channel.accept();
try {
socket.configureBlocking(false);
new Session(socket).start()
} catch {
case e: Exception =>
socket.close
}
});
println("Listening on port: "+port);
def start() = {
accept_source.resume
this
}
def stop() = {
accept_source.cancel
}
accept_source.onCancel {
channel.close();
println("Closed port: "+port);
}
}
class Session(val channel: SocketChannel) {
val buffer = ByteBuffer.allocate(1024);
val queue = createQueue("session")
val read_source = createSource(channel, SelectionKey.OP_READ, queue);
val write_source = createSource(channel, SelectionKey.OP_WRITE, queue);
val remote_address = channel.socket.getRemoteSocketAddress.toString
def start() = {
println("Accepted connection from: "+remote_address);
read_source.resume
}
def close() = {
read_source.cancel
}
read_source.onCancel {
write_source.cancel
}
write_source.onCancel {
channel.close
println("Closed connection from: "+remote_address);
}
read_source.setEventHandler(^{
try {
if (channel.read(buffer) == -1) {
close
} else {
buffer.flip;
if (buffer.remaining > 0) {
read_source.suspend
write_source.resume
} else {
buffer.clear
}
}
}
catch {
case e:IOException => close
}
})
write_source.setEventHandler(^{
try {
channel.write(buffer)
if (buffer.remaining == 0) {
buffer.clear
write_source.suspend
read_source.resume
}
}
catch {
case e:IOException => close
}
})
}
}
|
sam-ma/hawtdispatch | hawtdispatch-website/ext/Website.scala | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.fusesource.scalate.RenderContext
/**
* <p>
* </p>
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
object Website {
val project_name= "HawtDispatch"
val project_slogan= "The libdispatch style API for Java and Scala"
val project_id= "hawtdispatch"
val project_jira_key= "HAWTDISPATCH"
val project_issue_url= "http://hawtdispatch.assembla.com/spaces/hawtdispatch/support/tickets"
val project_forums_url= "http://groups.google.com/group/hawtdispatch"
val project_wiki_url= "http://wiki.github.com/fusesource/hawtdispatch"
val project_logo= "/images/project-logo.png"
val project_version= "1.16"
val project_snapshot_version= "1.17-SNAPSHOT"
val project_versions = List(
project_version,
"1.15",
"1.14","1.13","1.12","1.11","1.10",
"1.9","1.8","1.7","1.6","1.5","1.4","1.3","1.2","1.1","1.0")
val project_keywords= "dispatch,executor,java,scala,libdispatch,gcd,actor,thread,pool"
// -------------------------------------------------------------------
val github_page = "http://github.com/fusesource/hawtdispatch"
val git_user_url = "git://github.com/fusesource/hawtdispatch.git"
val git_commiter_url = "git@github.com:fusesource/hawtdispatch.git"
val project_maven_groupId= "org.fusesource.hawtdispatch"
val project_maven_artifactId= "hawtdispatch"
val website_base_url= "http://hawtdispatch.fusesource.org"
val api_dir = "http://hawtdispatch.fusesource.org/versions/"+project_version+"/maven/hawtdispatch/apidocs/org/fusesource/hawtdispatch"
val scala_api_dir = "http://hawtdispatch.fusesource.org/versions/"+project_version+"/maven/hawtdispatch-scala/scaladocs"
} |
sam-ma/hawtdispatch | hawtdispatch-scala/src/main/scala/org/fusesource/hawtdispatch/ScalaDispatchHelpers.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch
/**
* <p>
 * Contains several helper methods for working with retained objects
* and callbacks.
* </p>
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
object ScalaDispatchHelpers {
/////////////////////////////////////////////////////////////////////
//
// Helpers for working with Retained objects.
//
/////////////////////////////////////////////////////////////////////
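  // Illustrative usage (a sketch; `session` stands for any Retained resource
  // and `queue` for a DispatchQueue):
  //
  //   queue << using(session) { doWork() }
  //
  // retains `session` while the task is pending and releases it once the
  // task has run.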
def using(resource: Retained): (=> Unit) => Runnable = {
using(resource, resource) _
}
def using(resources: Seq[Retained]): (=> Unit) => Runnable = {
using(resources, resources) _
}
def retaining(resource: Retained): (=> Unit) => Runnable = {
using(resource, null) _
}
def retaining(resources: Seq[Retained]): (=> Unit) => Runnable = {
using(resources, null) _
}
def releasing(resource: Retained): (=> Unit) => Runnable = {
using(null, resource) _
}
def releasing(resources: Seq[Retained]): (=> Unit) => Runnable = {
using(null, resources) _
}
private def using(retainedResource: Retained, releasedResource: Retained)(proc: => Unit): Runnable = {
if (retainedResource != null) {
retainedResource.retain
}
new Runnable() {
def run = {
try {
proc;
} finally {
if (releasedResource != null) {
releasedResource.release
}
}
}
}
}
private def using(retainedResources: Seq[Retained], releasedResources: Seq[Retained])(proc: => Unit): Runnable = {
retain(retainedResources)
new Runnable() {
def run = {
try {
proc;
} finally {
release(releasedResources)
}
}
}
}
def retain(retainedResources: Seq[Retained]) = {
if (retainedResources != null) {
for (resource <- retainedResources) {
resource.retain
}
}
}
def release(releasedResources: Seq[Retained]) = {
if (releasedResources != null) {
for (resource <- releasedResources) {
resource.release
}
}
}
}
|
sam-ma/hawtdispatch | hawtdispatch-scala/src/main/scala/org/fusesource/hawtdispatch.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource
import org.fusesource.hawtdispatch._
import java.nio.channels.SelectableChannel
import java.util.concurrent.{ExecutorService, CountDownLatch, Executor, TimeUnit}
import java.util.concurrent.atomic.AtomicBoolean
import java.io.Closeable
/**
* <p>
* </p>
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
package object hawtdispatch {
implicit def ExecutorWrapper(x: Executor) = new RichExecutor(x)
implicit def DispatchQueueWrapper(x: DispatchQueue) = new RichDispatchQueue(x)
implicit def RichDispatchSourceWrapper(x: DispatchSource) = new RichDispatchSource(x)
trait RichDispatchObject {
def actual:DispatchObject
def target_=(queue: DispatchQueue) { actual.setTargetQueue( queue ) }
def target:DispatchQueue = actual.getTargetQueue
}
trait RichExecutorTrait {
protected def execute(task:Task):Unit
protected def execute(runnable:Runnable):Unit
/**
* <p>
* Submits a partial function for asynchronous execution on a dispatch queue.
* </p><p>
* Calls to {@link #execute(Task)} always return immediately after the runnable has
* been submitted, and never wait for the runnable to be executed.
* </p><p>
* The target queue determines whether the runnable will be invoked serially or
* concurrently with respect to other runnables submitted to that same queue.
   * Serial queues are processed concurrently with respect to each other.
* </p><p>
* The system will retain this queue until the runnable has finished.
* </p>
*
* @param task
* The function to submit to the dispatch queue.
*/
def apply(task: =>Unit) = execute(r(task _))
/**
* Creates a Task object which executes the supplied partial
* function on this executor when run.
*/
def runnable(task: =>Unit) = new Task() {
val target = r(task _)
def run: Unit = {
execute(target)
}
}
/**
* Same as {@link #apply(=>Unit)}
*/
def ^(task: =>Unit) = execute(r(task _))
/**
* <p>
* Submits a runnable for asynchronous execution on a dispatch queue.
* </p>
*
* @param task
* The runnable to submit to the dispatch queue.
*/
def <<(task: Runnable) = execute(task)
def <<(task: Task) = execute(task)
/**
* A right-associative version of the {@link #<<(Runnable)} method
*/
def >>:(task: Runnable) = execute(task)
def >>:(task: Task) = execute(task)
/**
* Executes the supplied function on the dispatch queue
* while blocking the calling thread as it waits for the response.
*/
def sync[T](func: =>T): T = future(func)()
/**
* Executes the supplied function on the dispatch queue
* and returns a Future that can be used to wait on the future
* result of the function.
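   *
   * Illustrative usage (a sketch, assuming `queue` is a DispatchQueue):
   * {{{
   * val f = queue.future { 21 * 2 }
   * f.onComplete(result => println(result))
   * }}}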
*/
def future[T](func: =>T):Future[T] = {
val rc = Future[T]()
apply {
rc(func)
}
rc
}
def flatFuture[T](func: =>Future[T]):Future[T] = {
val rc = Future[T]()
apply {
func.onComplete(rc(_))
}
rc
}
}
class RichDispatchSource(val actual:DispatchSource) extends Proxy with RichDispatchObject {
def self = actual
def onEvent(task: =>Unit) { actual.setEventHandler( r(task _) ) }
def onCancel(task: =>Unit) { actual.setCancelHandler( r(task _) ) }
}
/**
* Enriches the Executor interfaces with additional Scala friendly methods.
*/
final class RichExecutor(val executor: Executor) extends Proxy with RichExecutorTrait {
def self: Any = executor
protected def execute(task:Task) = executor.execute(task)
protected def execute(task:Runnable) = executor.execute(task)
}
/**
* Enriches the DispatchQueue interfaces with additional Scala friendly methods.
*/
final class RichDispatchQueue(val actual: DispatchQueue) extends Proxy with RichExecutorTrait with RichDispatchObject {
def self = actual
protected def execute(task:Runnable) = actual.execute(task)
protected def execute(task:Task) = actual.execute(task)
def label_=(value: String) { actual.setLabel( value ) }
def label:String = actual.getLabel
/**
* <p>
* Submits a partial function for asynchronous execution on a dispatch queue after
* the specified time delay.
* </p>
*
* @param time
* The amount of time to delay
* @param unit
* The units of time the delay is specified in
* @param task
* The runnable to submit to the dispatch queue.
*/
def after(time:Long, unit:TimeUnit)(task: =>Unit) = actual.executeAfter(time, unit, r(task _))
/**
* <p>
* Submits a partial function for repetitive asynchronous execution on a dispatch queue
   * each time the specified time delay elapses. Returns a Closeable which when closed will
* stop future executions of the task.
* </p>
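   * <p>
   * Illustrative usage (a sketch): tick once a second until closed.
   * </p>
   * <pre>
   * val ticker = queue.repeatAfter(1, TimeUnit.SECONDS) { println("tick") }
   * // ... later ...
   * ticker.close
   * </pre>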
*
* @param time
* The amount of time to delay
* @param unit
* The units of time the delay is specified in
* @param task
* The runnable to submit to the dispatch queue.
*/
def repeatAfter(time:Long, unit:TimeUnit)(task: =>Unit):Closeable = new Closeable {
val closed = new AtomicBoolean
def close: Unit = closed.set(true)
val action:Task = new Task() {
def run: Unit = {
if (!closed.get) {
try {
task
} catch {
case e:Throwable => e.printStackTrace
}
if (!closed.get) {
actual.executeAfter(time, unit, action)
}
}
}
}
actual.executeAfter(time, unit, action)
}
/**
* <p>
* Submits a runnable for asynchronous execution on a dispatch queue if the
* queue is not currently executing, otherwise if the queue is currently executing,
* then the runnable is directly executed.
* </p>
*
* @param task
* The runnable to submit to execute
*/
def <<|(task: Task) = {
if( actual.isExecuting ) {
try {
task.run
} catch {
case e:Exception =>
e.printStackTrace
}
} else {
actual.execute(task);
}
this
}
def <<|(task: Runnable):RichDispatchQueue = this <<|(new TaskWrapper(task))
/**
* <p>
* Submits a runnable for asynchronous execution on a dispatch queue if the
* queue is not currently executing, otherwise if the queue is currently executing,
* then the runnable is directly executed.
* </p>
*
* @param task
* The runnable to submit to execute
*/
def | (task: =>Unit ) = {
this.<<|( r(task _))
}
/**
* A right-associative version of the {@link #<<|(Runnable)} method
*/
def |>>:(task: Runnable) =this <<| task
def |>>:(task: Task) = this <<| task
}
/////////////////////////////////////////////////////////////////////
//
// re-export all the Dispatch static methods.
//
/////////////////////////////////////////////////////////////////////
/**
* Same as {@link Dispatch.getThreadQueues}
*/
def getThreadQueues(priority:DispatchPriority=DispatchPriority.DEFAULT) = Dispatch.getThreadQueues(priority)
/**
* Same as {@link Dispatch.getCurrentQueue}
*/
def getCurrentThreadQueue = Dispatch.getCurrentThreadQueue
/**
* Same as {@link Dispatch.createSource(EventAggregator, DispatchQueue)}
*/
def createSource[Event, MergedEvent](aggregator: EventAggregator[Event, MergedEvent], queue: DispatchQueue) = {
Dispatch.createSource(aggregator, queue)
}
/**
* Same as {@link Dispatch.createSource(SelectableChannel, Int, DispatchQueue)}
*/
def createSource(channel: SelectableChannel, interestOps: Int, queue: DispatchQueue) = {
Dispatch.createSource(channel, interestOps, queue)
}
/**
* Same as {@link Dispatch.getCurrentQueue}
*/
def getCurrentQueue = Dispatch.getCurrentQueue
/**
* Same as {@link Dispatch.createQueue(String)}
*/
def createQueue(label: String=null) = Dispatch.createQueue(label)
/**
* Same as {@link Dispatch.getGlobalQueue(DispatchPriority)}
*/
def getGlobalQueue(priority: DispatchPriority=DispatchPriority.DEFAULT) = Dispatch.getGlobalQueue(priority)
/**
* Same as {@link Dispatch.getGlobalQueue }
*/
def globalQueue = Dispatch.getGlobalQueue
/**
* Same as {@link Dispatch.NOOP }
*/
def NOOP = Dispatch.NOOP
/////////////////////////////////////////////////////////////////////
//
// Make it easier to create Task objects.
//
/////////////////////////////////////////////////////////////////////
/**
* Creates a runnable object from a partial function
*/
def ^(proc: => Unit): Task = r(proc _)
/**
* Creates a runnable object from a partial function
*/
private def r(proc: ()=>Unit): Task = new Task() {
def run() {
proc()
}
}
}
|
sam-ma/hawtdispatch | hawtdispatch-scala/src/main/scala/org/fusesource/hawtdispatch/TaskTracker.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch
import java.util.HashSet
import java.util.concurrent.{CountDownLatch, TimeUnit}
import org.fusesource.hawtdispatch._
/**
* <p>
* A TaskTracker is used to track multiple async processing tasks and
* call a callback once they all complete.
* </p>
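 * <p>
 * Illustrative usage (a minimal sketch): running a tracked task marks it
 * complete; the callback fires once every task has completed.
 * </p>
 * <pre>
 * val tracker = new TaskTracker("flush")
 * val t = tracker.task("disk write")
 * tracker.callback { println("all done") }
 * t.run()   // completes the last outstanding task, firing the callback
 * </pre>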
*
* @author <a href="http://hiramchirino.com"><NAME></a>
*/
class TaskTracker(val name:String="unknown", var timeout: Long = 0) {
private[this] val tasks = new HashSet[TrackedTask]()
private[this] var _callback:Runnable = null
val queue = createQueue("tracker: "+name);
var done = false
class TrackedTask(var name:Any) extends Task {
def run = {
remove(this)
}
override def toString = name.toString
}
def task(name:Any="unknown"):TrackedTask = {
val rc = new TrackedTask(name)
queue {
assert(_callback==null || !tasks.isEmpty)
tasks.add(rc)
}
return rc
}
def callback(handler: Runnable) {
var start = System.currentTimeMillis
queue {
_callback = handler
checkDone()
}
    def scheduleCheck(timeout:Long):Unit = {
if( timeout>0 ) {
queue.after(timeout, TimeUnit.MILLISECONDS) {
if( !done ) {
          scheduleCheck(onTimeout(start, tasks.toArray.toList.map(_.toString)))
}
}
}
}
    scheduleCheck(timeout)
}
def callback(handler: =>Unit ) {
callback(^(handler))
}
/**
* Subclasses can override if they want to log the timeout event.
* the method should return the next timeout value. If 0, then
* it will not check for further timeouts.
*/
protected def onTimeout(started:Long, tasks: List[String]):Long = 0
private def remove(r:Runnable) = queue {
if( tasks.remove(r) ) {
checkDone()
}
}
private def checkDone() = {
assert(!done)
if( tasks.isEmpty && _callback!=null && !done ) {
done = true
_callback.run
}
}
def await() = {
val latch =new CountDownLatch(1)
callback {
latch.countDown
}
latch.await
}
def await(timeout:Long, unit:TimeUnit) = {
val latch = new CountDownLatch(1)
callback {
latch.countDown
}
latch.await(timeout, unit)
}
override def toString = tasks.synchronized { name+" waiting on: "+tasks }
}
|
sam-ma/hawtdispatch | hawtdispatch-scala/src/main/scala/org/fusesource/hawtdispatch/Future.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch
import java.util.concurrent.{TimeUnit}
import scala.collection.mutable.ListBuffer
trait Future[R] extends ( ()=>R ) {
def get():R
def await():R
def await(time:Long, unit:TimeUnit):Option[R]
def apply() = await()
def onComplete(func: (R)=>Unit):Unit
def completed:Boolean
def map[X](func:R=>X):Future[X]
}
trait SettableFuture[T,R] extends (T => Unit) with Future[R] {
protected var _callback:Option[(R)=>Unit] = None
protected var _result:Option[R] = None
protected object mutex
protected def merge(value:T):Option[R]
def apply(value:T):Unit = set(value)
def set(value:T) = {
val callback = mutex synchronized {
if( !_result.isDefined ) {
_result = merge(value)
if( _result.isDefined ) {
mutex.notifyAll
_callback
} else {
None
}
} else {
None
}
}
callback.foreach(_(_result.get))
}
def get():R = {
_result.get
}
def await():R = {
assert(getCurrentQueue==null, "You must not do any blocking waits when executing on HawtDispatch thread pool.")
mutex.synchronized {
while(_result.isEmpty) {
mutex.wait
}
_result.get
}
}
def await(time:Long, unit:TimeUnit):Option[R] = mutex synchronized {
assert(getCurrentQueue==null, "You must not do any blocking waits when executing on HawtDispatch thread pool.")
var now = System.currentTimeMillis
var deadline = now + unit.toMillis(time)
while(_result.isEmpty && now < deadline ) {
mutex.wait(deadline-now)
if(_result != None) {
return _result
}
now = System.currentTimeMillis
}
return _result
}
def onComplete(func: (R)=>Unit) = {
val callback = mutex synchronized {
// Should only be used once per future.
assert ( ! _callback.isDefined )
if( _result.isDefined ) {
Some(func)
} else {
_callback = Some(func)
None
}
}
callback.foreach(_(_result.get))
}
def completed = mutex synchronized {
_result.isDefined
}
def map[X](func:R=>X) = {
val rc = Future.apply(func)
onComplete(rc(_))
rc
}
}
object Future {
/**
* creates a new future.
*/
def apply[T]():SettableFuture[T,T] = new SettableFuture[T,T] {
protected def merge(value: T): Option[T] = Some(value)
}
/**
* creates a new future which does an on the fly
* transformation of the value.
*/
def apply[T,R](func: T=>R):SettableFuture[T,R] = new SettableFuture[T,R] {
protected def merge(value: T): Option[R] = Some(func(value))
}
/**
* creates a future which only waits for the first
* of the supplied futures to get set.
*/
def first[T](futures:Iterable[Future[T]]):SettableFuture[T,T] = {
assert(!futures.isEmpty)
new SettableFuture[T,T] {
futures.foreach(_.onComplete(apply _))
protected def merge(value: T): Option[T] = {
Some(value)
}
}
}
/**
* creates a future which waits for all
* of the supplied futures to get set and collects all
* the results in an iterable.
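   *
   * Illustrative usage (a sketch): {{{ Future.all(List(f1, f2)).await() }}}
   * yields an Iterable containing both results once f1 and f2 have been set.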
*/
def all[T](futures:Iterable[Future[T]]):Future[Iterable[T]] = {
if( futures.isEmpty ) {
val rc = apply[Iterable[T]]()
rc(List())
rc
} else {
val results = new ListBuffer[T]()
new SettableFuture[T,Iterable[T]] {
futures.foreach(_.onComplete(apply _))
protected def merge(value: T): Option[Iterable[T]] = {
results += value
if( results.size == futures.size ) {
Some(results)
} else {
None
}
}
}
}
}
/**
* creates a future which waits for all
* of the supplied futures to get set and collects
* the results via folding function.
*/
def fold[T,R](futures:Iterable[Future[T]], initial:R)(func: (R,T)=>R):Future[R] = {
if( futures.isEmpty ) {
val rc = apply[R]()
rc(initial)
rc
} else {
var cur:R = initial
var collected = 0
new SettableFuture[T,R] {
futures.foreach(_.onComplete(apply _))
protected def merge(value: T): Option[R] = {
cur = func(cur, value)
collected += 1
if( collected == futures.size ) {
Some(cur)
} else {
None
}
}
}
}
}
}
|
sam-ma/hawtdispatch | hawtdispatch-example/src/main/scala/org/fusesource/hawtdispatch/example/discovery/EchoNetScala.scala | /**
* Copyright (C) 2012 FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.hawtdispatch.example.discovery
import _root_.java.io.{EOFException, ByteArrayOutputStream}
import _root_.java.net.{ConnectException, InetSocketAddress, URI}
import _root_.java.util.concurrent.TimeUnit
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import org.fusesource.hawtdispatch._
/**
* An example of a networks of servers which advertise connection information to each other.
*/
object EchoNetScala {
def main(args:Array[String]):Unit = {
run
}
def run() = {
val a = new Server(4444).start();
val b = new Server(5555).start();
val c = new Server(6666).start();
Thread.sleep(200);
a.connect(3333);
a.connect(b);
b.connect(c);
System.in.read
}
class Server(val port: Int) {
val me = URI.create("conn://localhost:" + port);
val serverChannel = ServerSocketChannel.open();
serverChannel.socket().bind(new InetSocketAddress(port));
serverChannel.configureBlocking(false);
var seen = List[URI]()
val queue = createQueue(me.toString)
val accept_source = createSource(serverChannel, SelectionKey.OP_ACCEPT, queue);
accept_source.setEventHandler(^ {
// we are a server
// when you are a server, we must first listen for the
// address of the client before sending data.
// once they send us their address, we will send our
// full list of known addresses, followed by our own
// address to signal that we are done.
// Afterward we will only pulls our heartbeat
val client = serverChannel.accept();
try {
val address = client.socket.getRemoteSocketAddress.asInstanceOf[InetSocketAddress]
trace("accept " + address.getPort());
client.configureBlocking(false);
// Server sessions start by reading the client's greeting
val session = new Session(this, client, address)
session.start_read_greeting
} catch {
case e: Exception =>
client.close
}
});
trace("Listening");
def start() = {
accept_source.resume
this
}
def stop() = {
accept_source.suspend
}
def close() = {
accept_source.cancel
}
accept_source.onCancel {
serverChannel.close();
}
def connect(s: Server):Unit = {
connect(s.port);
}
def connect(port: Int):Unit = {
connect(URI.create("conn://localhost:" + port));
}
    def connect(uri: URI): Unit = ^{
      // a `return` inside this closure would throw NonLocalReturnControl once
      // the enclosing method has returned, so guard the body instead
      if (!me.equals(uri) && !seen.contains(uri)) {
        val port = uri.getPort();
        val host = uri.getHost();
        trace("open " + uri);
        val socketChannel = SocketChannel.open();
        socketChannel.configureBlocking(false);
        val address = new InetSocketAddress(host, port);
        socketChannel.connect(address);
        val connect_source = createSource(socketChannel, SelectionKey.OP_CONNECT, queue);
        connect_source.onEvent {
          connect_source.cancel
          try {
            socketChannel.finishConnect
            trace("connected " + uri);
            val session = new Session(this, socketChannel, address, uri)
            session.start_write_greeting
          }
          catch {
            case e:ConnectException =>
              trace("connect to "+uri+" FAILED.");
          }
        }
        connect_source.resume
        seen = uri :: seen;
      }
    } >>: queue
def trace(str: String) {
println(String.format("%5d - %s", new java.lang.Integer(port), str));
}
}
class Session(val server:Server, val channel: SocketChannel, val address: InetSocketAddress, val uri: URI) {
def this(server:Server, channel: SocketChannel, address: InetSocketAddress) = {
this (server, channel, address, URI.create("conn://" + address.getHostName() + ":" + address.getPort()))
}
val read_buffer = ByteBuffer.allocate(1024);
val queue = createQueue(uri.toString)
val read_source = createSource(channel, SelectionKey.OP_READ, queue);
val write_source = createSource(channel, SelectionKey.OP_WRITE, queue);
val seen = server.seen
def start_read_greeting = {
read_source.setEventHandler(read_greeting)
read_source.resume
}
def read_greeting = ^{
val message = read_frame
if (message!=null) {
// stop looking for read events..
read_source.suspend
val uri = URI.create(message);
trace("welcome");
// Send them our seen uris..
var list:List[Any] = seen.filterNot(x=> x==server.me || x==uri )
list = list ::: List("end");
start_write_data(list, ^{
        start_read_heartbeat
})
}
}
def start_write_greeting = {
trace("hello");
start_write_data(server.me::Nil, ^{
start_read_server_listings
})
}
def start_read_server_listings = {
read_source.setEventHandler(read_server_listings)
read_source.resume
}
var listed = List[URI]()
def read_server_listings = ^ {
val message = read_frame
if (message!=null) {
if( message != "end" ) {
val uri: URI = URI.create(message)
listed = uri :: listed;
server.connect(uri)
} else {
// Send them our seen uris..
var list:List[Any] = seen.filterNot(x=> listed.contains(x) || x==server.me )
list = list ::: List("end");
start_write_data(list, ^{
// once done, start sending heartbeats.
            start_write_heartbeat
})
}
}
}
def start_read_client_listings = {
      read_source.setEventHandler(read_client_listings)
read_source.resume
}
    def read_client_listings = ^ {
val message = read_frame
if (message!=null) {
if( message != "end" ) {
server.connect(URI.create(message))
} else {
          start_read_heartbeat
}
}
}
    def start_write_heartbeat:Unit = {
      queue.executeAfter(1, TimeUnit.SECONDS, ^{
        trace("ping");
        start_write_data("ping"::Nil, ^{
          start_write_heartbeat
        })
      });
    }
    def start_read_heartbeat = {
      read_source.setEventHandler(read_heartbeat)
read_source.resume
}
    def read_heartbeat = ^ {
val message = read_frame
if (message != null) {
trace("pong");
}
}
def start_write_data(list:List[Any], onDone:Runnable) = {
val baos = new ByteArrayOutputStream()
list.foreach { next =>
baos.write(next.toString().getBytes("UTF-8"))
baos.write(0)
}
val buffer = ByteBuffer.wrap(baos.toByteArray)
write_source.setEventHandler(write_data(buffer, onDone))
write_source.resume
}
def write_data(buffer:ByteBuffer, onDone:Runnable) = ^ {
channel.write(buffer)
if (buffer.remaining == 0) {
write_source.suspend
onDone.run
}
}
def read_frame(): String = {
if( channel.read(read_buffer) == -1 ) {
throw new EOFException();
}
val buf = read_buffer.array
val endPos = eof(buf, 0, read_buffer.position)
if (endPos < 0) {
trace(" --- ");
return null
}
var rc = new String(buf, 0, endPos)
val newPos = read_buffer.position - endPos
System.arraycopy(buf, endPos + 1, buf, 0, newPos)
read_buffer.position(newPos)
// trace(rc);
return rc
}
def eof(data: Array[Byte], offset: Int, pos: Int): Int = {
var i = offset
while (i < pos) {
if (data(i) == 0) {
return i
}
i += 1
}
return - 1
}
def trace(str: String) = {
println(String.format("%5d %5d - %s", new java.lang.Integer(server.port), new java.lang.Integer(uri.getPort()), str));
}
}
}
|
tanbozensen/api | app/services/TanboService.scala | package services
import models.Tanbo
import play.api.Play.current
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick.DB
import models.TanboDAO
object TanboService {
/**
   * Create a rice paddy record and return the stored row
*/
def create (tanbo : Tanbo) : Option[Tanbo] = {
DB.withSession { implicit session =>
val id = TanboDAO.create(tanbo)(session)
return TanboDAO.searchByID(id)(session)
}
}
/**
   * Fetch all rice paddies
*/
def getAll: List[Tanbo] = {
DB.withSession { implicit session =>
return TanboDAO.searchAll(session)
}
}
/**
   * Delete a rice paddy by ID
*/
def delete(id: Long) = {
DB.withSession { implicit session =>
TanboDAO.removeById(id)(session)
}
}
} |
tanbozensen/api | app/constants/PHASE.scala | package constants
object PHASE {
  // Defining each phase as a case object makes it a singleton
  case object TAUE extends PHASE(0)
  case object INEKARI extends PHASE(1)
  private val codeTable = Array(TAUE, INEKARI)
  // With only two phases the complement is simply the other one. The bit
  // trick used before, (~code & 0x03) | (code & 0x04), was copied from a
  // five-value DNA enum and indexes past this two-element table.
  def complement(code:Int) : PHASE = codeTable(code ^ 1)
}
// sealed: classes extending PHASE can only be defined in this file
// abstract: guarantees there are no PHASE values other than TAUE and INEKARI,
// so match expressions over PHASE are exhaustive
sealed abstract class PHASE(val code:Int) {
  // Case objects get a toString that prints the class name
  val name = toString
  // Methods can be defined freely on the PHASE class
  def complement = PHASE.complement(code)
} |
tanbozensen/api | app/constants/RICE_TYPE.scala | package constants
object RICE_TYPE {
  // Defining each rice type as a case object makes it a singleton
  case object SASANISHIKI extends RICE_TYPE(0)
  case object KOSHIHIKARI extends RICE_TYPE(1)
  private val codeTable = Array(SASANISHIKI, KOSHIHIKARI)
  // With only two types the complement is simply the other one. The bit
  // trick used before was copied from a five-value DNA enum and indexes
  // past this two-element table.
  def complement(code:Int) : RICE_TYPE = codeTable(code ^ 1)
}
// sealed: classes extending RICE_TYPE can only be defined in this file
// abstract: guarantees there are no RICE_TYPE values other than SASANISHIKI
// and KOSHIHIKARI, so match expressions over RICE_TYPE are exhaustive
sealed abstract class RICE_TYPE(val code:Int) {
  // Case objects get a toString that prints the class name
  val name = toString
  // Methods can be defined freely on the RICE_TYPE class
  def complement = RICE_TYPE.complement(code)
} |
tanbozensen/api | app/controllers/TanboController.scala | package controllers
import models.Tanbo
import play.api.libs.json.JsError
import play.api.libs.json.Json
import play.api.libs.json.Json.toJsFieldJsValueWrapper
import play.api.mvc.Action
import play.api.mvc.Controller
import models.TanboDAO
import services.TanboService
import com.github.tototoshi.play.json.JsonNaming
object TanboController extends Controller {
implicit val tanboFormat = JsonNaming.snakecase(Json.format[Tanbo])
def post = Action(parse.json) { request =>
{
request.body.validate[Tanbo].map { dto =>
Created(Json.toJson(TanboService.create(dto)))
}.recoverTotal { e =>
BadRequest(Json.obj("error" -> JsError.toFlatJson(e)))
}
}
}
def get = Action { request =>
{
Ok(Json.toJson(TanboService.getAll))
}
}
def delete(id: Long) = Action(parse.empty) { request =>
{
TanboService.delete(id)
Ok("");
}
}
} |
tanbozensen/api | app/models/Tanbo.scala | package models
import play.api.Play.current
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick._
/**
 * Rice paddy record
*/
case class Tanbo(
id: Option[Long],
riceType: Option[Int],
phase: Int,
doneDate: String,
latitude: Double,
longitude: Double,
areaUnderTillage: Option[Double]
)
class Tanbos(tag: Tag) extends Table[Tanbo](tag, "tanbo") {
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
def riceType = column[Int]("riceType", O.Nullable)
def phase = column[Int]("phase", O.NotNull)
def doneDate = column[String]("doneDate", O.NotNull)
def latitude = column[Double]("latitude", O.NotNull)
def longitude = column[Double]("longitude", O.NotNull)
def areaUnderTillage = column[Double]("areaUnderTillage", O.Nullable)
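  // Default projection: maps a row to/from the Tanbo case class
  // (nullable columns are lifted to Option with `.?`)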
def * = (id.?, riceType.?, phase, doneDate, latitude, longitude, areaUnderTillage.?) <> ((Tanbo.apply _).tupled, Tanbo.unapply)
}
object TanboDAO {
lazy val tanboQuery = TableQuery[Tanbos]
/**
   * Search by ID
   * @param id the row ID
*/
def searchByID(id: Long)(implicit s: Session): Option[Tanbo] = {
tanboQuery.filter { _.id === id } firstOption
}
/**
   * Fetch all records
*/
def searchAll(implicit s: Session): List[Tanbo] = {
tanboQuery.list
}
/**
   * Create a record and return its auto-generated ID
* @param tanbo
*/
def create(tanbo: Tanbo)(implicit s: Session) : Long = {
return (tanboQuery returning tanboQuery.map(_.id)).insert(tanbo)
}
/**
   * Update a record
* @param tanbo
*/
def update(tanbo: Tanbo)(implicit s: Session) {
tanboQuery.filter(_.id === tanbo.id).update(tanbo)
}
/**
   * Delete a record by ID
   * @param id the row ID
*/
def removeById(id: Long)(implicit s: Session) {
tanboQuery.filter(_.id === id).delete
}
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/clusteringKMeans/Clustering.scala | package clusteringKMeans
import scala.collection.JavaConverters.seqAsJavaListConverter
import scala.collection.mutable.MultiMap
import scala.util.Random
import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions
import org.apache.spark.SparkContext.rddToPairRDDFunctions
import org.apache.spark.api.java.JavaPairRDD
import org.apache.spark.rdd.RDD
import info.debatty.java.graphs.Neighbor
import info.debatty.java.graphs.NeighborList
import info.debatty.java.graphs.NeighborListFactory
import info.debatty.java.graphs.Node
import info.debatty.java.stringsimilarity.JaroWinkler
import info.debatty.spark.nndescent.NNDescent
import util.CCProperties
import util.CCPropertiesImmutable
import util.CCUtil
import scala.collection.JavaConversions._
import org.apache.spark.SparkContext._
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.feature.Word2VecModel
import org.apache.spark.mllib.linalg._
import org.apache.spark.mllib.feature.Word2Vec
import org.apache.spark.mllib.clustering.KMeans
object Clustering
{
def loadDataset(data : RDD[String], property : CCPropertiesImmutable) : RDD[(String, String)] = // return (id, subject) OR (id, seed)
{
val toReturn = data.map(line =>
{
val splitted = line.split(property.separator)
if (splitted.size > 1)
{
(splitted(0), splitted(1))
} else
{
                throw new IllegalArgumentException("unable to parse line: " + line)
}
})
toReturn
}
def sumArray( m : Array[Double], n : Array[Double] ) : Array[Double] = {
for ( i <- 0 until m.length ) { m( i ) += n( i ) }
return m
}
def divArray( m : Array[Double], divisor : Double ) : Array[Double] = {
for ( i <- 0 until m.length ) { m( i ) /= divisor }
return m
}
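  // Look up a word's embedding; words absent from the model fall back to the
  // zero vector (100 matches Word2Vec's default vector size).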
def wordToVector( w : String, m : Word2VecModel ) : Vector = {
try {
return m.transform( w )
} catch {
case e : Exception => return Vectors.zeros( 100 )
}
}
def main( args_ : Array[String] ) : Unit =
{
// code from : http://bigdatasciencebootcamp.com/posts/Part_3/clustering_news.html
val timeStart = System.currentTimeMillis()
val propertyLoad = (new CCProperties("CLUSTERING", args_(0))).load();
val property = propertyLoad.getImmutable;
val k = propertyLoad.getInt("K", 39000);
val util = new CCUtil(property);
val sc = util.getSparkContext();
val data = util.loadVertexMail(sc.textFile(property.datasetKNN, property.sparkPartition))
val news_titles = data.map(t => (t._1, t._2.split(" ").toSeq))
val all_input = news_titles.map(t => t._2).flatMap(x => x).map(x => Seq(x))
val word2vec = new Word2Vec()
val model = word2vec.fit(all_input)
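    // Average the word vectors of each title's words to produce one
    // fixed-size document vector per title; these are what k-means clusters.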
val title_vectors = news_titles.map(x => x._2.map(m => wordToVector(m, model).toArray)).filter(t => !t.isEmpty)
.map(t => new DenseVector(divArray(t.reduceLeft(sumArray),t.length)).asInstanceOf[Vector])
val title_pairs = news_titles.map(x => (x, x._2.map(m => wordToVector(m, model).toArray))).filter(t => !t._2.isEmpty)
.map(t => (t._1, new DenseVector(divArray(t._2.reduceLeft(sumArray),t._2.length)).asInstanceOf[Vector]))
var numClusters = k
val numIterations = 25
var clusters = KMeans.train(title_vectors, numClusters, numIterations)
val article_membership = title_pairs.mapValues(x => clusters.predict(x)).cache
article_membership.first
val timeEnd = System.currentTimeMillis()
val toPrint = article_membership.map(x => x._1._1+"\t"+x._2)
toPrint.coalesce(1, true).saveAsTextFile(property.outputFile)
util.io.printStatSimple((timeEnd - timeStart).toString)
}
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/util/CCUtil.scala | package util
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import scala.collection.mutable.ListBuffer
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import scala.io.Source
import org.apache.spark.api.java.JavaSparkContext
import com.google.common.base.Joiner
import java.io.FileWriter
class CCUtil(property : CCPropertiesImmutable) extends Serializable {
val io = new CCUtilIO(property)
var vertexNumber = 0L
def getSparkContext() : SparkContext =
{
val conf = new SparkConf()
.setMaster(property.sparkMaster)
.setAppName(property.appName)
.set("spark.executor.memory", property.sparkExecutorMemory)
.set("spark.storage.blockManagerSlaveTimeoutMs", property.sparkBlockManagerSlaveTimeoutMs)
.set("spark.shuffle.manager", property.sparkShuffleManager)
.set("spark.shuffle.consolidateFiles", property.sparkShuffleConsolidateFiles)
.set("spark.io.compression.codec", property.sparkCompressionCodec)
.set("spark.akka.frameSize", property.sparkAkkaFrameSize)
.set("spark.driver.maxResultSize", property.sparkDriverMaxResultSize)
// .set("spark.task.cpus", "8")
.setJars(Array(property.jarPath))
if(property.sparkCoresMax > 0)
conf.set("spark.cores.max", property.sparkCoresMax.toString)
if(property.sparkExecutorInstances > 0)
conf.set("spark.executor.instances", property.sparkExecutorInstances.toString)
val spark = new SparkContext(conf)
// spark.setCheckpointDir(".")
spark
}
def getJavaSparkContext() : JavaSparkContext =
{
val conf = new SparkConf()
.setMaster(property.sparkMaster)
.setAppName(property.appName)
.set("spark.executor.memory", property.sparkExecutorMemory)
.set("spark.storage.blockManagerSlaveTimeoutMs", property.sparkBlockManagerSlaveTimeoutMs)
.set("spark.shuffle.manager", property.sparkShuffleManager)
.set("spark.shuffle.consolidateFiles", property.sparkShuffleConsolidateFiles)
.set("spark.io.compression.codec", property.sparkCompressionCodec)
.set("spark.akka.frameSize", property.sparkAkkaFrameSize)
.set("spark.driver.maxResultSize", property.sparkDriverMaxResultSize)
// .set("spark.task.cpus", "8")
.setJars(Array(property.jarPath))
if(property.sparkCoresMax > 0)
conf.set("spark.cores.max", property.sparkCoresMax.toString)
if(property.sparkExecutorInstances > 0)
conf.set("spark.executor.instances", property.sparkExecutorInstances.toString)
        val spark = new SparkContext(conf)
        // spark.setCheckpointDir(".")
        new JavaSparkContext(spark) // wrap explicitly instead of relying on the implicit conversion
}
    // returns the edge list and the edges associated with each vertex
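    // e.g. with separator "," the line "1,2" contributes both (1,2) and (2,1),
    // so the grouped view maps every endpoint to its neighbour list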
def loadEdgeFromFile(data : RDD[String]) : (RDD[(Long, Long)], RDD[(Long, Iterable[Long])]) =
{
val toReturnEdgeList = data.flatMap(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2) { // splitted(1) is read below, so two fields are required
try {
if(property.vertexIdMultiplier != -1)
{
Array((splitted(0).toLong * property.vertexIdMultiplier, splitted(1).toLong * property.vertexIdMultiplier), (splitted(1).toLong * property.vertexIdMultiplier, splitted(0).toLong * property.vertexIdMultiplier))
} else
{
Array((splitted(0).toLong, splitted(1).toLong), (splitted(1).toLong, splitted(0).toLong))
}
} catch {
case e : Exception => Array[(Long, Long)]()
}
} else {
Array[(Long, Long)]()
}
})
// val toReturnVertex = toReturnEdgeList.distinct.groupByKey
val toReturnVertex = toReturnEdgeList.groupByKey
if (property.printMessageStat && property.allStat) {
val edgeNumber = toReturnEdgeList.count / 2
vertexNumber = toReturnVertex.count
val degreeMax = toReturnVertex.map(t => t._2.size).max
val degreeAvg = toReturnVertex.map(t => t._2.size).mean
io.printStat(edgeNumber, "edgeNumber")
io.printStat(vertexNumber, "vertexNumber")
io.printStat(degreeMax, "degreeMax")
io.printStat(degreeAvg, "degreeAvg")
}
(toReturnEdgeList, toReturnVertex)
}
def loadEdgeFromFileComparable(data : RDD[String]) : (RDD[(String, String)], RDD[(String, Iterable[String])]) =
{
val toReturnEdgeList = data.flatMap(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2) {
try {
Array((splitted(0), splitted(1)), (splitted(1), splitted(0)))
} catch {
case e : Exception => Array[(String, String)]()
}
} else {
Array[(String, String)]()
}
})
// val toReturnVertex = toReturnEdgeList.distinct.groupByKey
val toReturnVertex = toReturnEdgeList.groupByKey
if (property.printMessageStat && property.allStat) {
val edgeNumber = toReturnEdgeList.count / 2
vertexNumber = toReturnVertex.count
val degreeMax = toReturnVertex.map(t => t._2.size).max
val degreeAvg = toReturnVertex.map(t => t._2.size).mean
io.printStat(edgeNumber, "edgeNumber")
io.printStat(vertexNumber, "vertexNumber")
io.printStat(degreeMax, "degreeMax")
io.printStat(degreeAvg, "degreeAvg")
}
(toReturnEdgeList, toReturnVertex)
}
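    // Parses adjacency lines of the form "<src><separator><n1> <w1> <n2> <w2> ...":
    // neighbour/weight tokens are grouped two at a time and kept only when the weight
    // exceeds edgeThreshold; e.g. with threshold 0.5 a line "a<sep>b 0.9 c 0.2"
    // yields (a,b) and (b,a) and drops c.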
def loadEdgeFromFileAdjComparable(data : RDD[String], edgeThreshold : Double, k : Int) : (RDD[(String, String)], RDD[(String, Iterable[String])]) =
{
val toReturnEdgeList = data.flatMap(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2 && !splitted(0).trim.isEmpty()) {
try {
val splitted2 = splitted(1).split(" ")
if(splitted2.size > 0)
{
splitted2 .map(t => t.trim)
.filter(t => !t.isEmpty())
.grouped(2).toArray
.filter(t => t(1).toDouble > edgeThreshold)
.flatMap(t => Array((splitted(0), t(0)),(t(0), splitted(0))))
} else
{
Array[(String, String)]()
}
} catch {
case e : Exception => Array[(String, String)]()
}
} else {
Array[(String, String)]()
}
})
// val toReturnVertex = toReturnEdgeList.distinct.groupByKey
val toReturnVertex = toReturnEdgeList.groupByKey
if(property.printCCDistribution)
{
val distribution = data.flatMap(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2 && !splitted(0).trim.isEmpty()) {
try {
val splitted2 = splitted(1).split(" ")
if(splitted2.size > 0)
{
splitted2 .map(t => t.trim)
.filter(t => !t.isEmpty())
.grouped(2).toArray
.filter(t => t(1).toDouble > edgeThreshold)
.map(t => (splitted(0), t(0)))
} else
{
Array[(String, String)]()
}
} catch {
case e : Exception => Array[(String, String)]()
}
} else {
Array[(String, String)]()
}
})
val edgesRemovedDistribution = distribution.groupByKey.map(t => (k - t._2.size, 1)).reduceByKey{case(a,b) => a+b}
val joiner = Joiner.on(",")
val printFile = new FileWriter( "distributionEdgeRemoved.txt", true )
val token : Array[Object] = Array( property.datasetCC,
k.toString,
edgeThreshold.toString)
val tokenToString = joiner.join(token)
// description = dataset,k,edgeThreshold,edgeRemoved,number
val toPrint = edgesRemovedDistribution.map(t => tokenToString+","+t._1+","+t._2+"\n").reduce{case(a,b) => a+b}
printFile.write(toPrint)
printFile.close
}
if (property.printMessageStat && property.allStat) {
val edgeNumber = toReturnEdgeList.count / 2
vertexNumber = toReturnVertex.count
val degreeMax = toReturnVertex.map(t => t._2.size).max
val degreeAvg = toReturnVertex.map(t => t._2.size).mean
io.printStat(edgeNumber, "edgeNumber")
io.printStat(vertexNumber, "vertexNumber")
io.printStat(degreeMax, "degreeMax")
io.printStat(degreeAvg, "degreeAvg")
}
(toReturnEdgeList, toReturnVertex)
}
def loadEdgeFromFileAdjMetric(data : RDD[String], edgeThreshold : Double) : (RDD[(String, String)], RDD[(String, Iterable[String])]) =
{
val toReturnEdgeList = data.flatMap(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2 && !splitted(0).trim.isEmpty()) {
try {
val splitted2 = splitted(1).split(" ")
if(splitted2.size > 0)
{
splitted2 .map(t => t.trim)
.filter(t => !t.isEmpty())
.grouped(2).toArray
.filter(t => t(1).toDouble > edgeThreshold)
.flatMap(t => Array((splitted(0), t(0)),(t(0), splitted(0))))
} else
{
Array[(String, String)]()
}
} catch {
case e : Exception => Array[(String, String)]()
}
} else {
Array[(String, String)]()
}
})
// val toReturnVertex = toReturnEdgeList.distinct.groupByKey
val toReturnVertex = toReturnEdgeList.groupByKey
// if (property.printMessageStat && property.allStat) {
// val edgeNumber = toReturnEdgeList.count / 2
// vertexNumber = toReturnVertex.count
//
// val degreeMax = toReturnVertex.map(t => t._2.size).max
// val degreeAvg = toReturnVertex.map(t => t._2.size).mean
//
// io.printStat(edgeNumber, "edgeNumber")
// io.printStat(vertexNumber, "vertexNumber")
// io.printStat(degreeMax, "degreeMax")
// io.printStat(degreeAvg, "degreeAvg")
// }
(toReturnEdgeList, toReturnVertex)
}
def loadEdgeFromFileAdjBrute(data : RDD[String]) : RDD[(String, Iterable[(String, Double)])] =
{
val toReturnEdgeList = data.flatMap(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2 && !splitted(0).trim.isEmpty()) {
try {
val splitted2 = splitted(1).split(" ")
if(splitted2.size > 0)
{
splitted2 .map(t => t.trim)
.filter(t => !t.isEmpty())
.grouped(2).toArray
// .filter(t => t(1).toDouble > edgeThreshold)
.flatMap(t => Array((splitted(0), (t(0), t(1).toDouble)),(t(0), (splitted(0), t(1).toDouble))))
} else
{
Array[(String, (String, Double))]()
}
} catch {
case e : Exception => Array[(String, (String, Double))]()
}
} else {
Array[(String, (String, Double))]()
}
})
toReturnEdgeList.groupByKey
}
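    // Same parsing as loadEdgeFromFileAdjBrute above, but the KNN variant keeps the
    // edges directed: only src -> neighbour is emitted, matching the asymmetric KNN graph.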
def loadEdgeFromFileAdjBruteKNN(data : RDD[String]) : RDD[(String, Iterable[(String, Double)])] =
{
val toReturnEdgeList = data.flatMap(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2 && !splitted(0).trim.isEmpty()) {
try {
val splitted2 = splitted(1).split(" ")
if(splitted2.size > 0)
{
splitted2 .map(t => t.trim)
.filter(t => !t.isEmpty())
.grouped(2).toArray
// .filter(t => t(1).toDouble > edgeThreshold)
.flatMap(t => Array((splitted(0), (t(0), t(1).toDouble))))
} else
{
Array[(String, (String, Double))]()
}
} catch {
case e : Exception => Array[(String, (String, Double))]()
}
} else {
Array[(String, (String, Double))]()
}
})
toReturnEdgeList.groupByKey
}
def loadVertexMail(data : RDD[String]) : RDD[(String, String)] =
{
val toReturnEdgeList : RDD[(String, String)] = data.map(line =>
{
val splitted = line.split(property.separator)
            if (splitted.size >= 2 && !splitted(0).trim.isEmpty) {
try {
(splitted(0), splitted(1))
} catch {
case e : Exception => ("EMPTY","EMPTY")
}
} else {
("EMPTY","EMPTY")
}
})
toReturnEdgeList.filter(t => !t._1.equals("EMPTY"))
}
// load from a file in the format of
// vertexID, arcID
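    // e.g. lines "7,a" and "9,a" share arc "a", so mapToEdgeList below connects them
    // pairwise, emitting both (7,9) and (9,7)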
def loadVertexEdgeFile(data : RDD[String]) : (RDD[(Long, Long)], RDD[(Long, Iterable[Long])]) =
{
def mapToEdgeList(item : (String, Iterable[Long])) : Iterable[(Long, Long)] =
{
var outputList : ListBuffer[(Long, Long)] = new ListBuffer
val it = item._2.iterator
while (it.hasNext) {
val next = it.next
val it2 = item._2.iterator
while (it2.hasNext) {
val next2 = it2.next
if (next != next2) {
outputList.prepend((next, next2))
}
}
}
outputList.toIterable
}
val toReturnEdgeList = data.flatMap(line =>
{
val splitted = line.split(",")
            if (splitted.size >= 2) {
try {
Array((splitted(1), splitted(0).toLong))
} catch {
case e : Exception => Array[(String, Long)]()
}
} else {
Array[(String, Long)]()
}
})
val edgeList = toReturnEdgeList.groupByKey.flatMap(mapToEdgeList)
// io.printEdgelist(edgeList)
val toReturnVertex = edgeList.groupByKey
if (property.printMessageStat) {
val edgeNumber = toReturnEdgeList.count
val vertexNumber = toReturnVertex.count
io.printStat(edgeNumber, "edgeNumber")
io.printStat(vertexNumber, "vertexNumber")
}
(edgeList, toReturnVertex)
}
def getCCNumber( rdd : RDD[(Long, Int)] ) =
{
rdd.count
}
def getCCNumber( rdd : Array[(Long, Int)] ) =
{
rdd.size
}
def getCCNumberNoIsolatedVertices( rdd : RDD[(Long, Int)] ) =
{
rdd.filter(t=>t._2!=1).count
}
def getCCNumberString( rdd : RDD[(String, Int)] ) =
{
rdd.count
}
def getCCNumberString( rdd : Array[(String, Int)] ) =
{
rdd.size
}
def getCCNumberNoIsolatedVerticesString( rdd : RDD[(String, Int)] ) =
{
rdd.filter(t=>t._2!=1).count
}
def getCCNumberNoIsolatedVertices( rdd : Array[(Long, Int)] ) =
{
rdd.filter(t=>t._2!=1).size
}
def getCCMaxSize( rdd : RDD[(Long, Int)] ) =
{
rdd.map(t=>t._2).max
}
def getCCMaxSizeString( rdd : RDD[(String, Int)] ) =
{
rdd.map(t=>t._2).max
}
def getCCMaxSize( rdd : Array[(Long, Int)] ) =
{
rdd.map(t=>t._2).max
}
def printSimplification( step : Int, activeVertices : Long , activeEdges : Double, degreeMax : Int) =
{
io.printSimplification(step, activeVertices, vertexNumber, activeEdges, degreeMax)
}
def printSimplificationDiameter( step : Int, activeVertices : Long , activeEdges : Double, degreeMax : Int) =
{
io.printSimplification(step, activeVertices, vertexNumber, activeEdges, degreeMax)
}
def printTimeStep( step : Int, time : Long) =
{
if(!property.printMessageStat)
io.printTimeStep(step, time)
}
def printTimeStepDiameter( step : Int, time : Long) =
{
if(!property.printMessageStat)
io.printTimeStep(step, time)
}
def testEnded( rdd : RDD[(Long, Int)],
step : Int,
timeBegin : Long,
timeEnd : Long,
timeSparkLoaded : Long,
timeDataLoaded : Long,
reduceInputMessageNumber : Long,
reduceInputSize : Long,
bitmaskCustom : String) =
{
io.printTime( timeBegin, timeEnd, "all" )
io.printTime( timeSparkLoaded, timeEnd, "allComputationAndLoadingGraph" )
io.printTime( timeDataLoaded, timeEnd, "allComputation" )
io.printStep( step )
io.printStat(reduceInputMessageNumber, "reduceInputMessageNumber")
io.printStat(reduceInputSize, "reduceInputSize")
io.printFileEnd(property.appName)
io.printAllStat( property.algorithmName,
property.datasetCC,
property.sparkPartition,
step,
(timeEnd - timeBegin),
(timeEnd - timeSparkLoaded) ,
(timeEnd - timeDataLoaded),
reduceInputMessageNumber,
reduceInputSize,
getCCNumber(rdd),
getCCNumberNoIsolatedVertices(rdd),
getCCMaxSize(rdd),
property.customColumnValue,
bitmaskCustom)
if(property.printCCDistribution)
{
io.printCCDistribution(rdd)
io.printCC(rdd)
}
}
def testEndedComparable( rdd : RDD[(String, Int)],
step : Int,
timeBegin : Long,
timeEnd : Long,
timeSparkLoaded : Long,
timeDataLoaded : Long,
reduceInputMessageNumber : Long,
reduceInputSize : Long,
bitmaskCustom : String) =
{
io.printTime( timeBegin, timeEnd, "all" )
io.printTime( timeSparkLoaded, timeEnd, "allComputationAndLoadingGraph" )
io.printTime( timeDataLoaded, timeEnd, "allComputation" )
io.printStep( step )
io.printStat(reduceInputMessageNumber, "reduceInputMessageNumber")
io.printStat(reduceInputSize, "reduceInputSize")
io.printFileEnd(property.appName)
io.printAllStat( property.algorithmName,
property.datasetCC,
property.sparkPartition,
step,
(timeEnd - timeBegin),
(timeEnd - timeSparkLoaded) ,
(timeEnd - timeDataLoaded),
reduceInputMessageNumber,
reduceInputSize,
getCCNumberString(rdd),
getCCNumberNoIsolatedVerticesString(rdd),
getCCMaxSizeString(rdd),
property.customColumnValue,
bitmaskCustom)
if(property.printCCDistribution)
{
io.printCCDistributionString(rdd)
// io.printCC(rdd)
}
}
}
|
alessandrolulli/knnMeetsConnectedComponents | src/main/java/crackerAllComparable/CrackerMessagePropagation.scala | package crackerAllComparable
class CrackerTreeMessagePropagation[T <: Comparable[T]](val min : Option[T], val child : Set[T]) extends CrackerMessageSize with Serializable
{
def getMessageSize = child.size + 1
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/crackerAllComparable/CrackerAllComparable.scala | package crackerAllComparable
import scala.Array.canBuildFrom
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext.rddToPairRDDFunctions
import org.apache.spark.rdd.RDD
import util.CCProperties
import util.CCUtil
import util.CCPropertiesImmutable
import java.io.PrintWriter
import java.io.File
import java.io.FileWriter
object CrackerAllComparable {
def main(args : Array[String]) : Unit =
{
val timeBegin = System.currentTimeMillis()
/*
* additional properties:
* crackerUseUnionInsteadOfJoin : true | false
* crackerCoalescePartition : true | false
*/
val propertyLoad = new CCProperties("CRACKER_ALL_COMPARABLE", args(0)).load
val crackerUseUnionInsteadOfJoin = propertyLoad.getBoolean("crackerUseUnionInsteadOfJoin", true)
val crackerCoalescePartition = propertyLoad.getBoolean("crackerCoalescePartition", true)
val crackerForceEvaluation = propertyLoad.getBoolean("crackerForceEvaluation", true)
val crackerSkipPropagation = propertyLoad.getBoolean("crackerSkipPropagation", false)
val nnDescentK = propertyLoad.getInt("nnDescentK", 10);
val property = propertyLoad.getImmutable
val cracker = new CrackerAlgorithm[String](property)
val util = new CCUtil(property)
val spark = util.getSparkContext()
val timeSparkLoaded = System.currentTimeMillis()
val file = spark.textFile(property.datasetCC, property.sparkPartition)
util.io.printFileStart(property.appName)
val (parsedData, fusedData) = util.loadEdgeFromFileAdjComparable(file, property.edgeThreshold, nnDescentK)
var ret = fusedData.map(item => (item._1, new CrackerTreeMessageIdentification((item._2.toSet + item._1).min, item._2.toSet)))
val timeDataLoaded = System.currentTimeMillis()
var control = false;
var step = 0
var treeRDD : Option[RDD[(String, CrackerTreeMessageTree[String])]] = Option.empty
        // if this is not done, CCs of size 1 are not recognized
treeRDD = Option.apply(ret.map(t => (t._1, new CrackerTreeMessageTree(Option.empty, Set()))))
        // this is OK, but a better heuristic is needed to decide when to perform load balancing (it matters most at the beginning of the computation)
def forceLoadBalancing(step : Int) : Boolean =
{
step == 0 || step == 2 || step == 8 || step == 16 || step == 32
// step < 10 && step % 3 == 0
}
while (!control) {
// simplification step
val timeStepStart = System.currentTimeMillis()
ret = ret.flatMap(item => cracker.emitBlue(item, true))
ret = ret.reduceByKey(cracker.reduceBlue).cache
val active = ret.count
            control = active <= property.switchLocal // threshold at which the computation switches to local mode
val timeStepBlue = System.currentTimeMillis()
util.printTimeStep(step + 1, timeStepBlue-timeStepStart)
if (!control) {
// reduction step
val check = step
val tmp = ret.flatMap(item => cracker.emitRed(item, forceLoadBalancing(check)))
if(forceLoadBalancing(check))
{
util.io.printStat(check, "loadBalancing triggered")
}
val tmpReduced = tmp.reduceByKey(cracker.reduceRed)
ret = tmpReduced.filter(t => t._2.first.isDefined).map(t => (t._1, t._2.first.get))
treeRDD = cracker.mergeTree(treeRDD, tmpReduced.filter(t => t._2.second.isDefined).map(t => (t._1, t._2.second.get)), crackerUseUnionInsteadOfJoin, crackerForceEvaluation)
val timeStepEnd = System.currentTimeMillis()
step = step + 2
util.io.printTimeStep(timeStepStart, timeStepBlue, timeStepEnd)
util.printTimeStep(step, timeStepEnd-timeStepBlue)
} else {
step = step + 1
util.io.printTime(timeStepStart, timeStepBlue, "blue")
}
}
        if (true) // run local: always enabled here; set to false to skip the local refinement phase
{
val timeLocalStart = System.currentTimeMillis()
var retCollected = ret.collect
control = false
var localStep = 0
while(!control)
{
                // red step (reduction)
val tmp = retCollected.flatMap(item => cracker.emitRed(item))
val tmpReduced = tmp.groupBy(t => t._1).toArray.map { case (group, traversable) => (group, traversable.map(t=> t._2).reduce(cracker.reduceRed)) }
retCollected = tmpReduced.filter(t => t._2.first.isDefined).map(t => (t._1, t._2.first.get))
treeRDD = cracker.mergeTree(spark, treeRDD, tmpReduced.filter(t => t._2.second.isDefined).map(t => (t._1, t._2.second.get)), crackerUseUnionInsteadOfJoin, crackerForceEvaluation)
// blue step
retCollected = retCollected.flatMap(item => cracker.emitBlue(item, false))
retCollected = retCollected.groupBy(t => t._1).toArray.map { case (group, traversable) => (group, traversable.map(t=> t._2).reduce(cracker.reduceBlue)) }
val active = retCollected.size
control = active == 0
localStep += 2
}
val timeLocalEnd = System.currentTimeMillis()
util.io.printStat(localStep, "localStep")
util.io.printStat(timeLocalEnd - timeLocalStart, "localTime")
}
var treeRDDPropagationTmp = treeRDD.get
if(crackerUseUnionInsteadOfJoin && crackerCoalescePartition)
{
val timeStepStart = System.currentTimeMillis()
treeRDDPropagationTmp = treeRDDPropagationTmp.coalesce(property.sparkPartition)
val timeStepBlue = System.currentTimeMillis()
util.io.printTime(timeStepStart, timeStepBlue, "coalescing")
}
var treeRDDPropagation = treeRDDPropagationTmp.reduceByKey(cracker.reducePrepareDataForPropagation).map(t => (t._1, t._2.getMessagePropagation(t._1))).cache
control = false
while (!control) {
val timeStepStart = System.currentTimeMillis()
treeRDDPropagation = treeRDDPropagation.flatMap(item => cracker.mapPropagate(item))
treeRDDPropagation = treeRDDPropagation.reduceByKey(cracker.reducePropagate).cache
control = treeRDDPropagation.map(t => t._2.min.isDefined).reduce { case (a, b) => a && b }
step = step + 1
val timeStepBlue = System.currentTimeMillis()
util.io.printTime(timeStepStart, timeStepBlue, "propagation")
util.printTimeStep(step, timeStepBlue-timeStepStart)
}
val timeEnd = System.currentTimeMillis()
util.testEndedComparable(treeRDDPropagation.map(t => (t._2.min.get, 1)).reduceByKey { case (a, b) => a + b },
step,
timeBegin,
timeEnd,
timeSparkLoaded,
timeDataLoaded,
0,
0,
getBitmaskStat(crackerUseUnionInsteadOfJoin,crackerCoalescePartition,crackerForceEvaluation))
if(property.printCC)
{
val toPrint = treeRDDPropagation.map(t => t._1+"\t"+t._2.min.get)
toPrint.coalesce(1, true).saveAsTextFile(property.outputFile)
}
}
def bool2int(b:Boolean) = if (b) 1 else 0
def printLargestCC(sc : SparkContext, property : CCPropertiesImmutable, tree : RDD[(String, CrackerTreeMessagePropagation[String])], edgelist : RDD[(String, String)]) =
{
val maxCCId = tree.map(t => (t._2.min.get, 1)).reduceByKey { case (a, b) => a + b }.max()(new Ordering[Tuple2[String, Int]]() {
override def compare(x: (String, Int), y: (String, Int)): Int =
Ordering[Int].compare(x._2, y._2)
})._1
        val maxCCVertex = tree.filter(t => t._2.min.exists(_ == maxCCId)).map(t => t._1) // min is an Option, so unwrap before comparing
val maxCCVertexBroadcast = sc.broadcast(maxCCVertex.collect.toSet)
val edgelistFiltered = edgelist.filter{case (s, d) => maxCCVertexBroadcast.value.contains(d)}.collect
val writer = new FileWriter( property.filenameLargestCC, false )
        for (edge <- edgelistFiltered)
        {
writer.write(edge._1+" "+edge._2+"\n")
}
writer.close()
}
def getBitmaskStat( crackerUseUnionInsteadOfJoin : Boolean,
crackerCoalescePartition : Boolean,
crackerForceEvaluation : Boolean) : String =
{
bool2int(crackerUseUnionInsteadOfJoin).toString+bool2int(crackerCoalescePartition).toString+bool2int(crackerForceEvaluation).toString
}
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/util/CCPropertiesImmutable.scala | package util
class CCPropertiesImmutable(algorithmNameFromConfig : String,
val datasetKNN : String,
val datasetCC : String,
val outputFile : String,
val jarPath : String,
val sparkMaster : String,
val sparkPartition : Int,
val sparkExecutorMemory : String,
val sparkBlockManagerSlaveTimeoutMs : String,
val sparkCoresMax : Int,
val sparkShuffleManager : String,
val sparkCompressionCodec : String,
val sparkShuffleConsolidateFiles : String,
val sparkAkkaFrameSize : String,
val sparkDriverMaxResultSize : String,
val sparkExecutorInstances : Int,
val separator : String,
val printMessageStat : Boolean,
val printLargestCC : Boolean,
val printCC : Boolean,
val printCCDistribution : Boolean,
val printAll : Boolean,
val customColumnValue : String,
val switchLocal : Int,
val switchLocalActive : Boolean,
val vertexIdMultiplier : Int,
val vertexNumber : Int,
val loadBalancing : Boolean,
val selfStar : Boolean,
val transmitPreviousNeighbours : Boolean,
val edgeThreshold : Double) extends Serializable
{
val algorithmName = if(loadBalancing) algorithmNameFromConfig+"_LOAD" else algorithmNameFromConfig
val appName = algorithmName+"_"+datasetKNN+"_"+datasetCC
val allStat = printMessageStat && appName.contains("CRA")
val filenameLargestCC = datasetCC+"_largestCC"
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/crackerAllComparable/CrackerAlgorithm.scala | package crackerAllComparable
import org.apache.spark.SparkContext._
import scala.collection.mutable.ListBuffer
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext
import java.io.FileWriter
import util.CCPropertiesImmutable
class CrackerAlgorithm[T <: Comparable[T]](property : CCPropertiesImmutable) extends Serializable {
def mapPropagate(item : (T, CrackerTreeMessagePropagation[T])) : Iterable[(T, CrackerTreeMessagePropagation[T])] =
{
var outputList : ListBuffer[(T, CrackerTreeMessagePropagation[T])] = new ListBuffer
if (item._2.min.isDefined) {
outputList.prepend((item._1, new CrackerTreeMessagePropagation(item._2.min, Set())))
val it = item._2.child.iterator
while (it.hasNext) {
val next = it.next
outputList.prepend((next, new CrackerTreeMessagePropagation(item._2.min, Set())))
}
} else {
outputList.prepend(item)
}
outputList
}
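    // Propagation: a vertex that already knows its component minimum re-emits it for
    // itself and forwards it to every child in the propagation tree; a vertex still
    // waiting for its parent re-emits itself unchanged until the minimum arrives.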
def reducePropagate(item1 : CrackerTreeMessagePropagation[T], item2 : CrackerTreeMessagePropagation[T]) : CrackerTreeMessagePropagation[T] =
{
var minEnd = item1.min
if (minEnd.isEmpty) minEnd = item2.min
new CrackerTreeMessagePropagation(minEnd, item1.child ++ item2.child)
}
def emitBlue(item : (T, CrackerTreeMessageIdentification[T]), forceLoadBalancing : Boolean) : Iterable[(T, CrackerTreeMessageIdentification[T])] =
{
var outputList : ListBuffer[(T, CrackerTreeMessageIdentification[T])] = new ListBuffer
if (item._2.min == item._1 && (item._2.neigh.isEmpty || (item._2.neigh.size == 1 && item._2.neigh.contains(item._1)))) {
// outputList.prepend( ( item._1, new CrackerTreeMessage( item._2.min, Set()) ) )
} else {
val min = item._2.min
if (item._2.neigh.isEmpty) {
outputList.prepend((item._1, new CrackerTreeMessageIdentification(min, Set())))
} else {
outputList.prepend((item._1, new CrackerTreeMessageIdentification(min, Set(min))))
}
if (min.compareTo(item._1)<0 || !forceLoadBalancing) {
val it = item._2.neigh.iterator
while (it.hasNext) {
val next = it.next
outputList.prepend((next, new CrackerTreeMessageIdentification(min, Set(min))))
}
}
}
outputList.toIterable
}
def emitRed(item : (T, CrackerTreeMessageIdentification[T])) : Iterable[(T, CrackerTreeMessageRedPhase[T])] = {
emitRed(item, false)
}
def emitRed(item : (T, CrackerTreeMessageIdentification[T]), forceLoadBalancing : Boolean) : Iterable[(T, CrackerTreeMessageRedPhase[T])] = {
var outputList : ListBuffer[(T, CrackerTreeMessageRedPhase[T])] = new ListBuffer
val minset : Set[T] = item._2.neigh
if (minset.size > 1) {
if(property.loadBalancing || forceLoadBalancing)
{
outputList.prepend((item._2.min, CrackerTreeMessageRedPhase.apply(new CrackerTreeMessageIdentification(item._2.min, Set(item._2.min)))))
}
else
{
outputList.prepend((item._2.min, CrackerTreeMessageRedPhase.apply(new CrackerTreeMessageIdentification(item._2.min, minset))))
}
var it = minset.iterator
while (it.hasNext) {
val value : T = it.next
if (value != item._2.min)
outputList.prepend((value, CrackerTreeMessageRedPhase.apply(new CrackerTreeMessageIdentification(item._2.min, Set(item._2.min)))))
}
} else if (minset.size == 1 && minset.contains(item._1)) {
outputList.prepend((item._1, CrackerTreeMessageRedPhase.apply(new CrackerTreeMessageIdentification(item._1, Set()))))
}
if (!item._2.neigh.contains(item._1)) {
outputList.prepend((item._2.min, CrackerTreeMessageRedPhase.apply(new CrackerTreeMessageTree(Option.empty, Set(item._1)))))
outputList.prepend((item._1, CrackerTreeMessageRedPhase.apply(new CrackerTreeMessageTree(Option.apply(item._2.min), Set()))))
}
outputList.toIterable
}
def reduceBlue(item1 : CrackerTreeMessageIdentification[T], item2 : CrackerTreeMessageIdentification[T]) : CrackerTreeMessageIdentification[T] =
{
val ret = item1.neigh ++ item2.neigh
var min = item1.min
if(item2.min.compareTo(item1.min)<0) min = item2.min
new CrackerTreeMessageIdentification(min, ret)
}
def mergeMessageIdentification(first : Option[CrackerTreeMessageIdentification[T]], second : Option[CrackerTreeMessageIdentification[T]]) : Option[CrackerTreeMessageIdentification[T]] =
{
if (first.isDefined) {
first.get.merge(second)
} else {
second
}
}
def mergeMessageTree(first : Option[CrackerTreeMessageTree[T]], second : Option[CrackerTreeMessageTree[T]]) : Option[CrackerTreeMessageTree[T]] =
{
if (first.isDefined) {
first.get.merge(second)
} else {
second
}
}
def reduceRed(item1 : CrackerTreeMessageRedPhase[T], item2 : CrackerTreeMessageRedPhase[T]) : CrackerTreeMessageRedPhase[T] =
{
new CrackerTreeMessageRedPhase(mergeMessageIdentification(item1.first, item2.first), mergeMessageTree(item1.second, item2.second))
}
def mergeTree(start : Option[RDD[(String, CrackerTreeMessageTree[String])]], add : RDD[(String, CrackerTreeMessageTree[String])], crackerUseUnionInsteadOfJoin : Boolean, crackerForceEvaluation : Boolean) : Option[RDD[(String, CrackerTreeMessageTree[String])]] =
{
if (start.isDefined) {
if(crackerUseUnionInsteadOfJoin)
{
Option.apply(start.get.union(add))
} else
{
if(crackerForceEvaluation)
{
val treeUpdated = start.get.leftOuterJoin(add).map(t => (t._1, t._2._1.merge(t._2._2).get))
val forceEvaluation = treeUpdated.count
Option.apply(treeUpdated)
} else
{
Option.apply(start.get.leftOuterJoin(add).map(t => (t._1, t._2._1.merge(t._2._2).get)))
}
}
} else {
Option.apply(add)
}
}
def mergeTree(spark : SparkContext, start : Option[RDD[(String, CrackerTreeMessageTree[String])]], add : Array[(String, CrackerTreeMessageTree[String])], crackerUseUnionInsteadOfJoin : Boolean, crackerForceEvaluation : Boolean) : Option[RDD[(String, CrackerTreeMessageTree[String])]] =
{
if (start.isDefined) {
if(crackerUseUnionInsteadOfJoin)
{
Option.apply(start.get.union(spark.parallelize(add)))
} else
{
if(crackerForceEvaluation)
{
val treeUpdated = start.get.leftOuterJoin(spark.parallelize(add)).map(t => (t._1, t._2._1.merge(t._2._2).get))
val forceEvaluation = treeUpdated.count
Option.apply(treeUpdated)
} else
{
Option.apply(start.get.leftOuterJoin(spark.parallelize(add)).map(t => (t._1, t._2._1.merge(t._2._2).get)))
}
}
} else {
Option.apply(spark.parallelize(add))
}
}
def mergeTree(start : Option[Array[(Long, CrackerTreeMessageTree[String])]], add : Array[(Long, CrackerTreeMessageTree[String])]) : Option[Array[(Long, CrackerTreeMessageTree[String])]] =
{
if (start.isDefined) {
Option.apply(start.get.union(add))
} else {
Option.apply(add)
}
}
def reducePrepareDataForPropagation(a : CrackerTreeMessageTree[T], b : CrackerTreeMessageTree[T]) : CrackerTreeMessageTree[T] =
{
var parent = a.parent
if (parent.isEmpty) parent = b.parent
new CrackerTreeMessageTree(parent, a.child ++ b.child)
}
def getMessageNumberForPropagation(step : Int, vertexNumber : Long) =
{
val stepPropagation = (step - 1) / 2
(vertexNumber * stepPropagation) + vertexNumber
}
def getMessageSizeForPropagation(step : Int, vertexNumber : Long) =
{
val stepPropagation = (step - 1) / 2
((vertexNumber * 2) * stepPropagation) - vertexNumber
}
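    // Worked example (illustrative only): with step = 5 and vertexNumber = 100,
    // stepPropagation = (5 - 1) / 2 = 2, so the estimated message number is
    // 100 * 2 + 100 = 300 and the estimated message size is (100 * 2) * 2 - 100 = 300.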
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/info/debatty/spark/nndescent/NNDescentMainScala.scala | package info.debatty.spark.nndescent
import util.CCPropertiesImmutable
import org.apache.spark.api.java.JavaSparkContext
import util.CCUtil
import org.apache.spark.rdd.RDD
import util.CCProperties
import info.debatty.java.graphs.Node
import info.debatty.java.stringsimilarity.JaroWinkler
import org.apache.spark.api.java.JavaPairRDD
import info.debatty.java.graphs.NeighborListFactory
import info.debatty.java.graphs.NeighborList
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
object NNDescentMainScala
{
def main( args_ : Array[String] ) : Unit =
{
val timeBegin = System.currentTimeMillis()
val propertyLoad = new CCProperties("NNDESCENT", args_(0)).load();
val property = propertyLoad.getImmutable;
val nnDescentK = propertyLoad.getInt("nnDescentK", 10);
val nnDescentMaxIteration = propertyLoad.getInt("nnDescentMaxIteration", 20);
val nnDescentMinIteration = propertyLoad.getInt("nnDescentMinIteration", 5);
val nnDescentOneFile = propertyLoad.getBoolean("nnDescentOneFile", false);
val nnDescentMaxDistance = propertyLoad.getBoolean("nnDescentMaxDistance", false);
val util = new CCUtil(property);
val sc = util.getJavaSparkContext();
val file = sc.textFile(property.datasetKNN, property.sparkPartition)
val vertexRDD = util.loadVertexMail(file).map(t => new Node[String](t._1, t._2))
val jaroWinkler = new JaroWinkler
var iteration = nnDescentMinIteration
var nndes = new NNDescent[String]
nndes = nndes.setK(nnDescentK)
nndes = nndes.setMaxIterations(nnDescentMinIteration)
nndes = nndes.setSimilarity(jaroWinkler);
var graph = nndes.initializeAndComputeGraph(vertexRDD.toJavaRDD, new NeighborListFactory, property.sparkPartition);
        val graphSize = graph.cache().count // materialize the graph so the timing below is meaningful
val timeEnd = System.currentTimeMillis()
util.io.printCommonStat(0,
timeEnd-timeBegin,
timeEnd-timeBegin,
timeEnd-timeBegin,
0,
0,
iteration)
val graphScala = JavaPairRDD.toRDD(graph)
val toPrint2 = graphScala.map(t => t._1.id+"\t"+t._2.getNeighbourId())
if(nnDescentOneFile)
toPrint2.coalesce(1, true).saveAsTextFile(property.datasetCC)
else
toPrint2.saveAsTextFile(property.datasetCC)
nndes = nndes.setMaxIterations(1)
var timeTotal = timeEnd-timeBegin
while(nnDescentMaxIteration > iteration)
{
iteration = iteration + 1
val timeStartIteration = System.currentTimeMillis()
graph = nndes.computeGraph(graph, new NeighborListFactory);
val graphSize = graph.cache().count;
val timeEndIteration = System.currentTimeMillis()
timeTotal = timeTotal + (timeEndIteration - timeStartIteration)
util.io.printCommonStat(0,
timeTotal,
timeTotal,
timeTotal,
0,
0,
iteration)
val graphScala = JavaPairRDD.toRDD(graph)
val toPrint2 = graphScala.map(t => t._1.id+"\t"+t._2.getNeighbourId())
if(property.printAll || iteration % 5 == 0)
{
if(nnDescentOneFile)
toPrint2.coalesce(1, true).saveAsTextFile(property.outputFile+"_ITER_"+iteration)
else
toPrint2.saveAsTextFile(property.outputFile+"_ITER_"+iteration)
}
}
sc.close
}
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/util/CCUtilIO.scala | package util
import java.io.FileWriter
import java.text.DecimalFormat
import org.apache.spark.SparkContext._
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import com.google.common.base.Joiner
class CCUtilIO(property : CCPropertiesImmutable) extends Serializable
{
val fileStatDescription = "algorithmName,dataset,partition,step,timeAll,timeLoadingAndComputation,timeComputation,reduceInputMessageNumber,reduceInputSize,ccNumber,ccNumberNoIsolatedVertices,ccMaxSize,customColumn,cores,switchLocal,shuffleManager,compressionCodec,bitmaskCustom,sparkShuffleConsolidateFiles,edgeThreshold"
    val fileSimplificationDescription = "dataset,step,activeVertices,activeVerticesNormalized,algorithmName,activeEdges,degreeAvg,degreeMax"
val fileTimeStep = "dataset,algorithmName,step,time,cores,switchLocal,shuffleManager,compressionCodec,bitmaskCustom,sparkShuffleConsolidateFiles"
val fileStatDescriptionDiameter = "algorithmName,dataset,partition,step,timeAll,timeLoadingAndComputation,timeComputation,reduceInputMessageNumber,reduceInputSize,diameter,customColumn,cores,switchLocal,shuffleManager,compressionCodec,sparkShuffleConsolidateFiles,selfFunction,candidateFunction,loadBalancing,selfStar,transmitPreviousNeighbours,stepAll,diameterPlus"
    val fileSimplificationDescriptionDiameter = "dataset,step,activeVertices,activeVerticesNormalized,algorithmName,activeEdges,degreeAvg,degreeMax,selfFunction,candidateFunction,loadBalancing,selfStar,transmitPreviousNeighbours"
val fileTimeStepDiameter = "dataset,algorithmName,step,time,cores,switchLocal,shuffleManager,compressionCodec,bitmaskCustom,sparkShuffleConsolidateFiles,selfFunction,candidateFunction,loadBalancing,selfStar,transmitPreviousNeighbours"
def printStat( data : Long, description : String ) : Int =
{
printStat(data.toString, description)
}
def printStat( data : Double, description : String ) : Int =
{
printStat(data.toString, description)
}
def printStat( data : String, description : String ) : Int =
{
val printFile = new FileWriter( "time.txt", true )
printFile.write( description + ": " + data + "\n" )
printFile.close
0
}
def printSimplification( step : Int, activeVertices : Long, initialVertices : Long , activeEdges : Double, degreeMax : Int) =
{
val joiner = Joiner.on(",")
val printFile = new FileWriter( "simplification.txt", true )
val token : Array[Object] = Array( property.datasetCC,
step.toString,
activeVertices.toString,
((((activeVertices.toDouble * 100) / initialVertices)*100).round.toDouble / 100).toString,
property.algorithmName,
activeEdges.toString,
(activeEdges / activeVertices).toString,
degreeMax.toString)
printFile.write(joiner.join(token)+ "\n" )
printFile.close
}
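    // e.g. activeVertices = 123 with initialVertices = 1000 is logged as "12.3":
    // the percentage of surviving vertices, rounded to two decimal places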
def printTimeStep( step : Int, time : Long) =
{
val joiner = Joiner.on(",")
val printFile = new FileWriter( "timeStep.txt", true )
// dataset, algorithmName, step, time
val token : Array[Object] = Array( property.datasetCC,
property.algorithmName,
step.toString,
time.toString,
property.sparkCoresMax.toString,
property.switchLocal.toString,
property.sparkShuffleManager,
property.sparkCompressionCodec)
printFile.write(joiner.join(token)+ "\n" )
printFile.close
}
def printStatSimple(
value : String) =
{
val printFile = new FileWriter( "stats.txt", true )
val joiner = Joiner.on(",")
val token : Array[Object] = Array( property.algorithmName,
property.datasetKNN,
value
)
printFile.write(joiner.join(token)+ "\n" )
printFile.close
}
def printCommonStat(
step : Int,
            timeAll : Long,
timeLoadingAndComputation : Long,
timeComputation : Long,
reduceInputMessageNumber : Long,
reduceInputSize : Long,
iteration : Int) =
{
val printFile = new FileWriter( "stats.txt", true )
val joiner = Joiner.on(",")
val desc = "algorithmName,dataset,partition,step,timeAll,timeGraph,timeComputation,messageNumber,messageSize,customColumn,cores,shuffleManager,compression,consolidateFiles,iteration"
val token : Array[Object] = Array( property.algorithmName,
property.datasetKNN,
property.sparkPartition.toString,
step.toString,
            timeAll.toString,
timeLoadingAndComputation.toString,
timeComputation.toString,
reduceInputMessageNumber.toString,
reduceInputSize.toString,
property.customColumnValue,
property.sparkCoresMax.toString,
property.sparkShuffleManager,
property.sparkCompressionCodec,
property.sparkShuffleConsolidateFiles,
iteration.toString)
printFile.write(joiner.join(token)+ "\n" )
printFile.close
}
def printAllStat( algorithmName : String,
dataset : String,
partition : Int,
step : Int,
            timeAll : Long,
timeLoadingAndComputation : Long,
timeComputation : Long,
reduceInputMessageNumber : Long,
reduceInputSize : Long,
ccNumber : Long,
ccNumberNoIsolatedVertices : Long,
ccMaxSize : Int,
customColumnValue : String,
bitmaskCustom : String) =
{
val printFile = new FileWriter( "stats.txt", true )
val joiner = Joiner.on(",")
val token : Array[Object] = Array( algorithmName,
dataset,
partition.toString,
step.toString,
            timeAll.toString,
timeLoadingAndComputation.toString,
timeComputation.toString,
reduceInputMessageNumber.toString,
reduceInputSize.toString,
ccNumber.toString,
ccNumberNoIsolatedVertices.toString,
ccMaxSize.toString,
customColumnValue,
property.sparkCoresMax.toString,
property.switchLocal.toString,
property.sparkShuffleManager,
property.sparkCompressionCodec,
bitmaskCustom,
property.sparkShuffleConsolidateFiles,
property.edgeThreshold.toString)
printFile.write(joiner.join(token)+ "\n" )
printFile.close
}
def printCCDistribution(rdd : RDD[(Long, Int)]) =
{
val printFile = new FileWriter( "distribution.txt", true )
val joiner = Joiner.on(",")
val ccDistribution = rdd.map(t=>(t._2,1)).reduceByKey{case(a,b)=>a+b}.map(t=>t._1+","+t._2+"\n").reduce{case(a,b)=>a+b}
// val token : Array[Object] = Array(algorithmName, dataset, partition.toString, hybridMessageSizeBound.toString, step.toString, timaAll.toString, timeLoadingAndComputation.toString, timeComputation.toString, reduceInputMessageNumber.toString, reduceInputSize.toString, ccNumber.toString, ccMaxSize.toString)
//
// printFile.write(joiner.join(token)+ "\n" )
printFile.write(ccDistribution+ "\n" )
printFile.close
}
def printCCDistributionString(rdd : RDD[(String, Int)]) =
{
val printFile = new FileWriter( "distribution.txt", true )
val joiner = Joiner.on(",")
val ccDistribution = rdd.map(t=>(t._2,1)).reduceByKey{case(a,b)=>a+b}.map(t=>property.datasetCC+","+t._1+","+t._2+","+property.edgeThreshold.toString+"\n").reduce{case(a,b)=>a+b}
// val token : Array[Object] = Array(algorithmName, dataset, partition.toString, hybridMessageSizeBound.toString, step.toString, timaAll.toString, timeLoadingAndComputation.toString, timeComputation.toString, reduceInputMessageNumber.toString, reduceInputSize.toString, ccNumber.toString, ccMaxSize.toString)
//
// printFile.write(joiner.join(token)+ "\n" )
printFile.write(ccDistribution+ "\n" )
printFile.close
}
def printCC(rdd : RDD[(Long, Int)]) =
{
val printFile = new FileWriter( "cc.txt", true )
val joiner = Joiner.on(",")
val ccDistribution = rdd.map(t=>t._1+","+t._2+"\n").reduce{case(a,b)=>a+b}
// val token : Array[Object] = Array(algorithmName, dataset, partition.toString, hybridMessageSizeBound.toString, step.toString, timaAll.toString, timeLoadingAndComputation.toString, timeComputation.toString, reduceInputMessageNumber.toString, reduceInputSize.toString, ccNumber.toString, ccMaxSize.toString)
//
// printFile.write(joiner.join(token)+ "\n" )
printFile.write(ccDistribution+ "\n" )
printFile.close
}
def printEdgelist( data : RDD[(Long,Long)] ) =
{
val collected = data.collect.iterator
val printFile = new FileWriter( "edgelist.txt", true )
while(collected.hasNext)
{
val next = collected.next
printFile.write( next._1+" "+next._2 + "\n" )
}
printFile.close
}
def printFileStart(description : String) =
{
val printFile = new FileWriter( "time.txt", true )
printFile.write("\n"+ description+": START\n" )
printFile.close
}
def printFileEnd(description : String) =
{
val printFile = new FileWriter( "time.txt", true )
printFile.write( description+": END\n" )
printFile.close
}
def printTime( start : Long, end : Long, description : String ) =
{
val printFile = new FileWriter( "time.txt", true )
printFile.write( description + ": " + ( end - start ) + "\n" )
printFile.close
}
def printStep( step : Int ) =
{
val printFile = new FileWriter( "time.txt", true )
printFile.write( "step: "+ step + "\n" )
printFile.close
}
def printTimeStep( start : Long, red : Long, end : Long ) =
{
val printFile = new FileWriter( "time.txt", true )
printFile.write( "blue: " + ( red - start ) + " red: " + ( end - red ) + " all: " + ( end - start ) + "\n" )
printFile.close
}
def printToFile( file : String, data : String ) =
{
val printFile = new FileWriter( file, true )
printFile.write( data )
printFile.close
}
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/crackerAllComparable/CrackerMessageIdentification.scala | package crackerAllComparable
class CrackerTreeMessageIdentification[T <: Comparable[T]] (val min: T, val neigh: Set[T]) extends CrackerMessageSize with Serializable
{
def voteToHalt = neigh.isEmpty
def getMessageSize = neigh.size + 1
def merge(other : Option[CrackerTreeMessageIdentification[T]]) : Option[CrackerTreeMessageIdentification[T]] =
{
if(other.isDefined)
{
var minValue = min
if(other.get.min.compareTo(minValue) < 0) minValue = other.get.min
Option.apply(new CrackerTreeMessageIdentification(minValue, neigh ++ other.get.neigh))
} else
{
Option.apply(CrackerTreeMessageIdentification.this)
}
}
override def toString = neigh.toString
}
object CrackerTreeMessageIdentification
{
// def empty[T <: Comparable[T]]() = new CrackerTreeMessageIdentification[T](Option[T].empty, Set())
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/crackerAllComparable/CrackerMessageRedPhase.scala | package crackerAllComparable
class CrackerTreeMessageRedPhase[T <: Comparable[T]](val first : Option[CrackerTreeMessageIdentification[T]], val second : Option[CrackerTreeMessageTree[T]]) extends CrackerMessageSize with Serializable
{
    def getMessageSize = 0 // first.getOrElse(CrackerTreeMessageIdentification.empty).getMessageSize + second.getOrElse(CrackerTreeMessageTree.empty).getMessageSize
}
object CrackerTreeMessageRedPhase
{
def apply[T <: Comparable[T]](first : CrackerTreeMessageIdentification[T]) = new CrackerTreeMessageRedPhase[T](Option.apply(first), Option.empty)
def apply[T <: Comparable[T]](second : CrackerTreeMessageTree[T]) = new CrackerTreeMessageRedPhase[T](Option.empty, Option.apply(second))
} |
alessandrolulli/knnMeetsConnectedComponents | src/main/java/crackerAllComparable/CrackerMessageTree.scala | package crackerAllComparable
class CrackerTreeMessageTree[T <: Comparable[T]] (val parent : Option[T], val child : Set[T]) extends CrackerMessageSize with Serializable
{
def getMessageSize = child.size + 1
def merge(other : Option[CrackerTreeMessageTree[T]]) : Option[CrackerTreeMessageTree[T]] =
{
if(other.isDefined)
{
var parentNew = parent
if(parentNew.isEmpty)
{
parentNew = other.get.parent
}
Option.apply(new CrackerTreeMessageTree(parentNew, child ++ other.get.child))
} else
{
Option.apply(CrackerTreeMessageTree.this)
}
}
def merge(other : CrackerTreeMessageTree[T]) : CrackerTreeMessageTree[T] =
{
var parentNew = parent
if(parentNew.isEmpty)
{
parentNew = other.parent
}
new CrackerTreeMessageTree(parentNew, child ++ other.child)
}
def getMessagePropagation(id : T) =
{
if(parent.isEmpty)
{
new CrackerTreeMessagePropagation[T](Option.apply(id), child)
} else
{
new CrackerTreeMessagePropagation[T](Option.empty, child)
}
}
}
object CrackerTreeMessageTree
{
def empty = new CrackerTreeMessageTree(Option.empty, Set())
} |
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/tuple/CanFromLiterals.scala | package org.shapesafe.core.tuple
import shapeless.{::, HList}
trait CanFromLiterals extends CanCons {
_self: TupleSystem =>
object FromLiterals extends AbstractFromHList {
implicit def inductive[
H_TAIL <: HList,
TAIL <: Tuple,
HEAD <: UpperBound with Singleton
](
implicit
forTail: H_TAIL ==> TAIL,
cons: Cons[TAIL, HEAD]
): (HEAD :: H_TAIL) ==> cons.ConsResult = {
forAll[HEAD :: H_TAIL].==> { v =>
val prev = apply(v.tail)
cons(prev, v.head)
}
}
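      // e.g. a literal HList "a" :: "b" :: HNil is folded tail-first: the recursive
      // call builds Eye >< "b" and Cons then appends the head, giving (Eye >< "b") >< "a"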
}
}
|
ithinkicancode/shapesafe | macro/src/main/scala/org/shapesafe/m/FieldTypes.scala | package org.shapesafe.m
import shapeless.ops.hlist.Mapper
import shapeless.{Generic, HList, Poly1, Typeable}
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
trait FieldTypes[A <: Product] {
type Out <: HList
}
object FieldTypes {
case class Impl[A <: Product, O <: HList]() extends FieldTypes[A] {
type Out = O
}
type Aux[A <: Product, Out0 <: HList] = FieldTypes[A] { type Out = Out0 }
implicit def mkFieldTypes[A <: Product, L <: HList](
implicit
generic: Generic.Aux[A, L],
mapper: Mapper[typeablePoly.type, L]
) = Impl[A, mapper.Out]()
object typeablePoly extends Poly1 {
implicit def cse[A](
implicit
typeable: Typeable[A]
): Case[A] = macro cseImpl[A]
def cseImpl[A: c.WeakTypeTag](c: whitebox.Context)(typeable: c.Tree): c.Tree = {
import c.universe._
val tpA = weakTypeOf[A]
val describe = c.untypecheck(q"$typeable.describe")
println(describe)
val str = c.eval(
c.Expr[String](describe)
)
q"null.asInstanceOf[FieldTypes.typeablePoly.Case.Aux[$tpA, _root_.shapeless.labelled.FieldType[$str, $tpA]]]"
}
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/ProveArity.scala | package org.shapesafe.core.arity
import org.shapesafe.core.ProofSystem
/**
* Represents a reified Arity
*/
object ProveArity extends ProofSystem[Arity] {}
|
ithinkicancode/shapesafe | core/src/test/scala/org/shapesafe/core/util/RecordViewSpec.scala | package org.shapesafe.core.util
import org.shapesafe.BaseSpec
import shapeless.HNil
class RecordViewSpec extends BaseSpec {
import shapeless.syntax.singleton._
val record = ("a" ->> 1) ::
("b" ->> "x") ::
HNil
val view = RecordView(record)
it(classOf[view.GetV].getSimpleName) {
object poly extends view.GetV
// val get = RecordUtils.GetV(record)
// https://stackoverflow.com/questions/66036106/can-shapeless-record-type-be-used-as-a-poly1-part-2
assert(poly.apply("a".narrow) == 1)
assert(poly("b".narrow) == "x")
assert(
("b".narrow :: "a".narrow :: HNil).map(poly) ==
("x" :: 1 :: HNil)
)
}
it(classOf[view.GetField].getSimpleName) {
object poly extends view.GetField
// TODO: need type check
assert(poly.apply("a".narrow) == "a" ->> 1)
assert(poly.apply("b".narrow) == "b" ->> "x")
assert(
("b".narrow :: "a".narrow :: HNil).map(poly) ==
("b" ->> "x") ::
("a" ->> 1) ::
HNil
)
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/LeafArity.scala | package org.shapesafe.core.arity
import scala.language.implicitConversions
/**
* Irreducible
*/
trait LeafArity extends VerifiedArity {}
object LeafArity {}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/Poly1Base.scala | package org.shapesafe.core
import shapeless.Poly1
import scala.annotation.implicitNotFound
// TODO: compiler bug?
// https://stackoverflow.com/questions/65944627/in-scala-how-could-a-covariant-type-parameter-be-an-upper-bound-of-an-abstract
// TODO: merge into shapeless Poly1
trait Poly1Base[IUB, OUB] {
final type _IUB = IUB
final type _OUB = OUB
// TODO: how to override it in subclasses?
@implicitNotFound(
"[MISSING]:\n${I}\n ==>\n???\n"
)
trait Case[-I <: IUB] {
type Out <: OUB
def apply(v: I): Out
}
object Case {
type Aux[
I <: IUB,
O <: OUB
] = Case[I] {
type Out = O
}
    // only use as an implicit type parameter if the output type doesn't depend on O!
type Lt[
I <: IUB,
O <: OUB
] = Case[I] {
type Out <: O
}
}
@implicitNotFound(
"[MISSING]:\n${I}\n ==>\n${O}\n"
)
class ==>[
-I <: IUB,
O <: OUB
](val toOut: I => O)
extends Case[I] {
final type Out = O
override def apply(v: I): O = toOut(v)
}
def forAll[I <: IUB] = new Factory[I]() // same as `at` in Poly1?
protected class Factory[I <: IUB]() {
def ==>[O <: OUB](fn: I => O): I ==> O = new (I ==> O)(fn)
}
def summon[I <: IUB](
implicit
_case: Case[I]
): _case.type = _case
def summonFor[I <: IUB](v: I)(
implicit
_case: Case[I]
): _case.type = _case
def apply[I <: IUB](v: I)(
implicit
_case: Case[I]
): _case.Out = _case.apply(v)
object AsShapelessPoly1 extends Poly1 {
val outer: Poly1Base[IUB, OUB] = Poly1Base.this
implicit def delegate[I <: IUB, O <: OUB](
implicit
from: I ==> O
): Case.Aux[I, O] = at[I].apply { ii =>
from.apply(ii)
}
}
// object AsShapelessPoly2 extends Poly2 { TODO
// case class ComposeDep1[F[??] <: DepFn1[??]]() extends Poly1Base[Any, OUB] {
//
// implicit def chain[
// S,
// I <: IUB
// ](
// implicit
// dep1: F[S] { type Out <: I },
// _case: Poly1Base.this.Case[I]
// ): S ==> _case.Out = from[S].to { ss =>
// val i = dep1.apply(ss)
// _case(i)
// }
// }
//
// case class ComposeDep2[F[A, B] <: DepFn2[A, B]]() extends Poly1Base[Any, OUB] {
//
// implicit def chain[
// S1,
// S2,
// I <: IUB
// ](
// implicit
// dep2: F[S1, S2] { type Out <: I },
// _case: Poly1Base.this.Case[I]
// ): (S1, S2) ==> _case.Out = from[(S1, S2)].to { ss =>
// val i = dep2.apply(ss._1, ss._2)
// _case(i)
// }
// }
}
object Poly1Base {}
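// Minimal usage sketch (an illustrative assumption, not part of the original sources):
// a Poly1Base from Int to Int whose single case doubles its input.
object DoublerExample extends Poly1Base[Int, Int] {
  // forAll[Int].==> { ... } builds an `Int ==> Int` case; `apply` resolves it implicitly
  implicit val doubleCase: Int ==> Int = forAll[Int].==> { i => i * 2 }
}
// DoublerExample(21) summons doubleCase and evaluates to 42.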
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/unary/ReduceByName.scala | package org.shapesafe.core.shape.unary
import org.shapesafe.graph.commons.util.HasOuter
import org.shapesafe.core.axis.Axis.UB_->>
import org.shapesafe.core.axis.RecordUpdater
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{DebugSymbol, Expressions, OpStrs}
import org.shapesafe.core.shape.{LeafShape, ProveShape, Shape}
import shapeless.{::, HList}
import scala.language.implicitConversions
trait ReduceByName {
import ProveShape.Factory._
import ProveShape._
type _Unary <: DebugSymbol.On1
val oldNameUpdater: RecordUpdater
  // all names must be distinct - no duplication allowed
trait _On[
S1 <: Shape
] extends Conjecture1.^[S1]
with HasOuter {
override def outer: ReduceByName.this.type = ReduceByName.this
def s1: S1 with Shape
override type _AsOpStr = OpStrs.PrefixW1[_Unary#_AsOpStr, S1]
override type _AsExpr = _Unary#On[Expr[S1]]
}
object _On {
implicit def simplify[
S1 <: Shape,
P1 <: LeafShape
](
implicit
lemma: S1 |- P1,
toShape: _Indexing.ToShape.Case[P1#Record]
): _On[S1] =>> toShape.Out = {
ProveShape.forAll[_On[S1]].=>> { v =>
val p1 = lemma.valueOf(v.s1)
val result = toShape.apply(p1.record)
result
}
}
}
case class On[
S1 <: Shape
](
override val s1: S1 with Shape
) extends _On[S1] {}
object _Indexing extends UnaryIndexingFn.Distinct {
implicit def consOldName[
TI <: HList,
TO <: HList,
HI <: UB_->>
](
implicit
consTail: TI ==> TO,
oldName: oldNameUpdater.Case[(TO, HI)]
): (HI :: TI) ==> oldName.Out = {
forAll[HI :: TI].==> { v =>
val ti = v.tail
val to = consTail(ti)
oldName(to, v.head)
}
}
}
type _Indexing = _Indexing.type
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/tuple/TupleSystem.scala | package org.shapesafe.core.tuple
import org.shapesafe.core.Poly1Base
import shapeless.{HList, HNil}
trait TupleSystem {
type UpperBound
type Tuple
type Eye <: Tuple
def Eye: Eye
// type ><[
// TAIL <: Impl,
// HEAD <: UpperBound
// ] <: Impl
trait AbstractFromHList extends Poly1Base[HList, Tuple] {
final val outer = TupleSystem.this
implicit val toEye: HNil ==> Eye = {
forAll[HNil].==> { _ =>
Eye
}
}
}
}
object TupleSystem {}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/binary/Op2Like.scala | package org.shapesafe.core.arity.binary
import org.shapesafe.graph.commons.util.HasOuter
import org.shapesafe.core.arity.{Arity, ArityAPI, ArityConjecture}
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{DebugSymbol, OpStrs}
trait Op2Like extends Op2Like.DebuggingSupport {
trait Conjecture2[
A1 <: Arity,
A2 <: Arity
] extends ArityConjecture
with HasOuter {
def a1: A1
def a2: A2
final def outer: Op2Like.this.type = Op2Like.this
final override type _AsOpStr = OpStrs.Infix[A1, Debug[Unit, Unit]#_AsOpStr, A2] // TODO: add Bracket
final override type _AsExpr = Debug[Expr[A1], Expr[A2]]
}
type On[
A1 <: Arity,
A2 <: Arity
] <: Conjecture2[A1, A2]
def on(
a1: ArityAPI,
a2: ArityAPI
): On[a1._Arity, a2._Arity]
// object AsShapelessPoly2 extends Poly2 {
//
// implicit def trivial[
// A1 <: ArityCore,
// A2 <: ArityCore
// ]: Case.Aux[A1, A2, On[A1, A2]] = {
// at[A1, A2].apply { (a1, a2) =>
// Op2Like.this.on(a1, a2)
// }
// }
// }
// type AsShapelessPoly2 = AsShapelessPoly2.type
}
object Op2Like {
trait DebuggingSupport {
self: Op2Like =>
type Debug[A1, A2] <: DebugSymbol
// implicit def debug[
// A1 <: Arity,
// A2 <: Arity,
// I1 <: Arity.HasInfo,
// I2 <: Arity.HasInfo
// ](
// implicit
// toInfo1: ProveArity.|-[A1, I1],
// toInfo2: ProveArity.|-[A2, I2],
// fail: FailOn[I1, I2]
// ): On[A1, A2] =>> Arity = ProveArity.forAll[On[A1, A2]].=>> {
// ???
// }
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/debugging/CanPeek.scala | package org.shapesafe.core.debugging
trait CanPeek {
type _AsOpStr // use singleton-ops
type _AsExpr // use TypeVizCT macro
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/args/ApplyArgs.scala | package org.shapesafe.core.shape.args
import org.shapesafe.core.Poly1Base
import org.shapesafe.core.shape.Names
import shapeless.HList
trait ApplyArgs {
type OUB
val fromHList: Poly1Base[HList, OUB]
type Result[T <: OUB]
def toResult[T <: OUB](v: T): Result[T]
}
object ApplyArgs {
trait Direct extends ApplyArgs {
override type Result[T <: OUB] = T
override def toResult[T <: OUB](v: T): T = v
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/Names.scala | package org.shapesafe.core.shape
import org.shapesafe.core.debugging.CanPeek
import org.shapesafe.core.shape.args.ApplyLiterals
import org.shapesafe.core.tuple._
import shapeless.Witness
import scala.language.implicitConversions
trait Names extends IndicesMagnet with Names.proto.Tuple {}
object Names extends TupleSystem with CanCons with CanFromLiterals with ApplyLiterals.ToNames {
type UpperBound = String
object proto extends StaticTuples.Total[UpperBound] with CanInfix_>< {}
type Tuple = Names
class Eye extends proto.Eye with Names {
override type AsIndices = Indices.Eye
override def asIndices: Indices.Eye = Indices.Eye
}
lazy val Eye = new Eye
class ><[
TAIL <: Tuple,
HEAD <: UpperBound
](
override val tail: TAIL,
override val head: HEAD
) extends proto.><[TAIL, HEAD](tail, head)
with Tuple {
val headW: Witness.Aux[HEAD] = Witness[HEAD](head).asInstanceOf[Witness.Aux[HEAD]]
override type AsIndices = Indices.><[tail.AsIndices, Index.Name[HEAD]]
override def asIndices: AsIndices =
tail.asIndices >< Index.Name(headW)
trait PeekHead extends CanPeek {
override type _AsOpStr = Head
override type _AsExpr = Head
}
}
implicit def consW[TAIL <: Tuple, HEAD <: String]: Cons.FromFn2[TAIL, HEAD, TAIL >< HEAD] = {
Cons.from[TAIL, HEAD].to { (tail, head) =>
new ><(tail, head)
}
}
implicit class Infix[SELF <: Tuple](self: SELF) {
def ><(name: Witness.Lt[String]): SELF >< name.T = {
new ><(self, name.value)
}
}
implicit def toEyeInfix(s: Names.type): Infix[Eye] = Infix(Eye)
trait Syntax {
implicit def literalToNames(v: String)(
implicit
w: Witness.Aux[v.type]
): Eye >< v.type = {
Eye >< w
}
implicit def literalToInfix(v: String)(
implicit
w: Witness.Aux[v.type]
): Infix[Eye >< v.type] = {
Infix(Eye >< w)
}
}
object Syntax extends Syntax
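  // Illustrative note (assumption, not from the original sources): with
  // `import Names.Syntax._` in scope, string literals chain directly, e.g.
  // "x" >< "y" expands to Names.Eye >< "x" >< "y"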
}
|
ithinkicancode/shapesafe | macro/src/test/scala/org/shapesafe/m/viz/KindVizCTSpec.scala | package org.shapesafe.m.viz
import org.shapesafe.graph.commons.testlib.BaseSpec
import org.shapesafe.graph.commons.util.reflect.Reflection
import org.shapesafe.graph.commons.util.reflect.format.FormatProtos.{DeAlias, Hide}
import org.shapesafe.graph.commons.util.reflect.format.FormatOvrd.{~~, Only}
import org.shapesafe.graph.commons.util.viz.TypeViz
import shapeless.Witness
class KindVizCTSpec extends BaseSpec {
import KindVizCTSpec._
val gd = viz.infer(Witness("Int").value)
describe("Stub") {
it("ground truth") {
gd.typeStr.shouldBe(
"""String("Int")"""
)
}
it("Int") {
val w1 = stub.infoOf[Int]
viz[w1.Out].should_=:=(gd)
}
it(" ... implicitly") {
val w1 = stub[Int].summon
// viz.infer(w1).should_=:=()
viz[w1.Out].should_=:=(gd)
}
it("generic 1") {
val w1 = stub.infoOf[Dummy[_, _]]
viz[w1.Out].typeStr
.shouldBe(
s"""String("KindVizCTSpec.Dummy")"""
)
}
//TODO: for some reason, not working
ignore("generic 2") {
val e1 = Dummy[Int, String]()
viz[e1.infoOf.type#Out].typeStr
.shouldBe(
s"""String("KindVizCTSpec.Dummy")"""
)
}
}
describe("Ovrd") {
it("1") {
val w1 = ovrd.infoOf[Dummy[Int, String]]
viz[w1.Out].typeStr
.shouldBe(
s"""String("Int a b String")"""
)
}
it(" ... implicitly") {
val w1 = ovrd[Dummy[Int, String]].summon
viz[w1.Out].typeStr
.shouldBe(
s"""String("Int a b String")"""
)
}
describe("abort on error") {
val localV = "a"
it("1") {
val w1 = ovrd.infoOf[Only[localV.type]]
viz[w1.Out].typeStr
.shouldBe(
s"""String("FormatOvrd.Only")"""
)
}
it("2") {
val w1 = ovrd.infoOf[Only[nonFinalV.type]]
viz[w1.Out].typeStr
.shouldBe(
s"""String("FormatOvrd.Only")"""
)
}
}
}
}
object KindVizCTSpec {
val viz: TypeViz[Reflection.Runtime.type] = TypeViz.formattedBy {
import org.shapesafe.graph.commons.util.reflect.format.Formats._
TypeInfo ~ DeAlias ~ Hide.Package
}
val stub = KindVizCT.NoTree
val ovrd = KindVizCT.Ovrd
final val finalV = "b"
val nonFinalV = "b"
case class Dummy[T1, T2]() extends (T1 ~~ Only["a"] ~~ Only[finalV.type] ~~ T2) {
final val infoOf = stub.infoOf[Dummy[T1, T2]]
}
object Dummy {
lazy val name: String = classOf[Dummy[_, _]].getCanonicalName
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/debugging/Expressions.scala | <reponame>ithinkicancode/shapesafe<filename>core/src/main/scala/org/shapesafe/core/debugging/Expressions.scala
package org.shapesafe.core.debugging
object Expressions extends Expressions_Imp0 {
trait +[A, B] extends DebugSymbol {
type _AsOpStr = " + "
}
trait -[A, B] extends DebugSymbol {
type _AsOpStr = " - "
}
trait *[A, B] extends DebugSymbol {
type _AsOpStr = " * "
}
trait /[A, B] extends DebugSymbol {
type _AsOpStr = " / "
}
trait ==[A, B] extends DebugSymbol.Require {
type _AsOpStr = " == "
type Complement = !=[A, B]
}
trait !=[A, B] extends DebugSymbol.Require {
type _AsOpStr = " != "
type Complement = ==[A, B]
}
trait >[A, B] extends DebugSymbol.Require {
type _AsOpStr = " > "
type Complement = <=[A, B]
}
trait >=[A, B] extends DebugSymbol.Require {
type _AsOpStr = " >= "
type Complement = >[A, B]
}
trait <[A, B] extends DebugSymbol.Require {
type _AsOpStr = " < "
type Complement = >=[A, B]
}
trait <=[A, B] extends DebugSymbol.Require {
type _AsOpStr = " <= "
type Complement = >[A, B]
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/binary/Op2.scala | <reponame>ithinkicancode/shapesafe<filename>core/src/main/scala/org/shapesafe/core/arity/binary/Op2.scala
package org.shapesafe.core.arity.binary
import org.shapesafe.core.arity.Const
import org.shapesafe.core.arity.ProveArity.|-<
import org.shapesafe.core.arity.Utils.Op
import org.shapesafe.core.arity._
import org.shapesafe.core.debugging.{DebugSymbol, DebugUtil, OpStrs}
import singleton.ops.+
import scala.collection.mutable
import scala.language.implicitConversions
trait Op2 extends Op2Like {
type Lemma[X1, X2] <: Op
}
object Op2 extends Op2_Imp0 {
class Impl[
??[X1, X2] <: Op,
SS[A, B] <: DebugSymbol
](
implicit
sh: Utils.IntSh[??]
) extends Op2 {
override type Lemma[X1, X2] = ??[X1, X2]
override type Debug[A, B] = SS[A, B]
case class On[
A1 <: Arity,
A2 <: Arity
](
a1: A1,
a2: A2
) extends Conjecture2[A1, A2] {
// TODO: can this be VerifiedArity?
override type _Refute =
DebugUtil.REFUTE.T + OpStrs.Infix[A1, SS[Unit, Unit]#_AsOpStr, A2] + DebugUtil.UNDEFINED.T
override lazy val runtimeArity: Int = sh.apply(a1.runtimeArity, a2.runtimeArity).getValue
}
override def on(a1: ArityAPI, a2: ArityAPI): On[a1._Arity, a2._Arity] = On(a1.arity, a2.arity)
}
lazy val cache = mutable.Map.empty[AnyRef, Op2]
def apply[
??[X1, X2] <: Op,
SS[A, B] <: DebugSymbol
](
implicit
sh: Utils.IntSh[??]
): Impl[??, SS] = {
cache
.getOrElseUpdate(
sh,
new Impl[??, SS]
)
.asInstanceOf[Impl[??, SS]]
}
implicit def invar[
A1 <: Arity,
A2 <: Arity,
S1,
S2,
OP <: Op2
](
implicit
bound1: A1 |-< Const[S1], // TODO: make it similar to unsafe
bound2: A2 |-< Const[S2],
lemma: OP#Lemma[S1, S2]
) = {
ProveArity.forAll[OP#On[A1, A2]].=>> { _ =>
Const.Derived.summon[OP#Lemma[S1, S2]](lemma)
}
}
// def apply[
// ??[X1, X2] <: Op,
// A1 <: ArityCore,
// A2 <: ArityCore
// ](
// a1: A1,
// a2: A2
// )(
// implicit
// sh: Utils.IntSh[??]
// ): Op2[??]#On[A1, A2] = {
//
// val op2 = new Op2[??] // TODO: should be cached
//
// op2.On(a1, a2)
// }
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/binary/Require2.scala | package org.shapesafe.core.arity.binary
import org.shapesafe.core.arity.Const
import org.shapesafe.core.arity.ProveArity.|-<
import org.shapesafe.core.arity.Utils.Op
import org.shapesafe.core.arity.{Arity, ArityAPI, ProveArity, Utils}
import org.shapesafe.core.debugging.Reporters.ForArity
import org.shapesafe.core.debugging.{DebugSymbol, DebugUtil, OpStrs}
import scala.collection.mutable
/**
* Output is always the TIGHTEST TYPE CONSTRAINT of the FIRST argument, without exception
*/
trait Require2 extends Op2Like {
type Lemma[X1, X2] <: Op
}
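// Illustration (a sketch, not part of the API): for any `req: Require2`, the proof of
// `req.on(a1, a2)` evaluates to the tightest constraint of a1 only; a2 is consumed by
// the check. At runtime the predicate is re-validated before a1's value is returned:
//
//   val conjecture = req.on(Arity(2), Arity(2))
//   conjecture.runtimeArity   // passes require(...) and returns the FIRST arity: 2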
object Require2 extends Require2_Imp0 {
import ProveArity.Factory._
import singleton.ops._
class Impl[
??[X1, X2] <: Op,
SS[A, B] <: DebugSymbol.Require
](
implicit
sh: Utils.BoolSh[??]
) extends Require2 {
// TODO: this should supersede AssertEqual
override type Lemma[X1, X2] = ??[X1, X2]
override type Debug[A, B] = SS[A, B]
import singleton.ops._
case class On[
A1 <: Arity,
A2 <: Arity
](
a1: A1,
a2: A2
) extends Conjecture2[A1, A2] {
override type _Refute =
DebugUtil.REFUTE.T + OpStrs.Infix[A1, SS[Unit, Unit]#Complement#_AsOpStr, A2]
override lazy val runtimeArity: Int = {
val v1 = a1.runtimeArity
val v2 = a2.runtimeArity
require(sh.apply(v1, v2).getValue, "runtime Requirement failed")
v1
}
}
override def on(a1: ArityAPI, a2: ArityAPI): On[a1._Arity, a2._Arity] = {
On(a1.arity, a2.arity)
}
}
lazy val cache = mutable.Map.empty[AnyRef, Require2]
def apply[
??[X1, X2] <: Op,
SS[A, B] <: DebugSymbol.Require
](
implicit
sh: Utils.BoolSh[??]
): Impl[??, SS] = {
cache
.getOrElseUpdate(
sh,
new Impl[??, SS]
)
.asInstanceOf[Impl[??, SS]]
}
implicit def invar[
A1 <: Arity,
A2 <: Arity,
S1,
S2,
OP <: Require2,
MSG
](
implicit
bound1: A1 |-< Const[S1],
bound2: A2 |-< Const[S2],
refute0: ForArity.Refute0[OP#On[Const[S1], Const[S2]], MSG],
lemma: RequireMsg[
OP#Lemma[S1, S2],
MSG
]
): OP#On[A1, A2] =>> Const[S1] = {
ProveArity.forAll[OP#On[A1, A2]].=>> { v =>
bound1.valueOf(v.a1)
}
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/binary/OuterProduct.scala | package org.shapesafe.core.shape.binary
import org.shapesafe.core.axis.Axis
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{Expressions, OpStrs}
import org.shapesafe.core.shape.LeafShape.><
import org.shapesafe.core.shape.ProveShape._
import org.shapesafe.core.shape.{LeafShape, Shape}
import shapeless.HList
import shapeless.ops.hlist.Prepend
case class OuterProduct[
S1 <: Shape,
S2 <: Shape
](
s1: S1,
s2: S2
) extends Conjecture2.^[S1, S2] {
override type _AsOpStr = OpStrs.Infix[S1, " OuterProduct ", S2]
override type _AsExpr = Expressions.><[Expr[S1], Expr[S2]]
}
trait OuterProduct_Imp0 {
import org.shapesafe.core.shape.ProveShape.Factory._
// TODO: should leverage append once the deadlock problem has been solved
implicit def simplify[
S1 <: Shape,
P1 <: LeafShape,
S2 <: Shape,
P2 <: LeafShape,
HO <: HList
](
implicit
lemma1: S1 |- P1,
lemma2: S2 |- P2,
concat: Prepend.Aux[P2#Static, P1#Static, HO],
toShape: LeafShape.FromStatic.Case[HO]
): OuterProduct[S1, S2] =>> toShape.Out = {
forAll[OuterProduct[S1, S2]].=>> { direct =>
val p1: P1 = lemma1.valueOf(direct.s1)
val p2: P2 = lemma2.valueOf(direct.s2)
toShape(concat(p2.static, p1.static))
}
}
}
object OuterProduct extends OuterProduct_Imp0 {
import org.shapesafe.core.shape.ProveShape.Factory._
// shortcut for trivial D + 1 case
implicit def append[
S1 <: Shape,
P1 <: LeafShape,
S2 <: Shape,
A2 <: Axis,
P2 <: LeafShape.Eye >< A2
](
implicit
lemma1: S1 |- P1,
lemma2: S2 |- P2
): OuterProduct[S1, S2] =>> (P1 >< A2) = {
forAll[OuterProduct[S1, S2]].=>> { direct =>
val p1: P1 = lemma1.valueOf(direct.s1)
val p2: P2 = lemma2.valueOf(direct.s2)
val a2: A2 = p2.head
val result = p1.^ appendInner a2
result
}
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/ShapeAPI.scala | package org.shapesafe.core.shape
import org.shapesafe.core.arity.ops.ArityOps
import org.shapesafe.core.arity.{Arity, Const, LeafArity}
import org.shapesafe.core.shape.LeafShape.><^
import org.shapesafe.core.shape.ProveShape.|-
import org.shapesafe.core.shape.binary.OuterProduct
import org.shapesafe.core.shape.ops.{EinSumOps, LeafOps, MatrixOps, VectorOps}
import org.shapesafe.core.shape.unary._
import shapeless.ops.hlist.Reverse
import shapeless.ops.nat.ToInt
import shapeless.{HList, Nat, SingletonProductArgs, Witness}
import scala.language.implicitConversions
trait ShapeAPI extends VectorOps with MatrixOps {
import ShapeAPI._
final override def toString: String = shape.toString
final def verify[
O <: Shape
](
implicit
prove: _Shape |- O
): ^[O] = prove.valueOf(shape).^
// final def simplify[ // TODO: no use at the moment
// O <: LeafShape
// ](
// implicit
// prove: _Shape |- O
// ): ^[O] = prove.valueOf(shape).^
def eval[ // TODO: eval each member?
O <: LeafShape
](
implicit
prove: _Shape |- O
): ^[O] = verify(prove)
def peek(
implicit
reporter: ShapeReporters.PeekShape.Case[_Shape]
): this.type = this
def interrupt(
implicit
reporter: ShapeReporters.InterruptShape.Case[_Shape]
): this.type = this
def reason[
O <: LeafShape
](
implicit
reporter: ShapeReporters.PeekShape.Case[_Shape],
prove: _Shape |- O
): ^[O] = eval(prove)
/**
* assign new names
*/
object namedWith {
def apply[N <: Names](newNames: N): ^[|<<-[_Shape, N]] = {
new |<<-[_Shape, N](shape.^, newNames).^
}
}
lazy val |<<- : namedWith.type = namedWith
// no need for Names constructor
object named extends SingletonProductArgs {
def applyProduct[H1 <: HList, H2 <: HList](
v: H1
)(
implicit
reverse: Reverse.Aux[H1, H2],
lemma: Names.FromLiterals.Case[H2]
): ^[|<<-[_Shape, lemma.Out]] = {
val out = lemma.apply(reverse(v))
namedWith.apply(out)
}
}
lazy val |<<-* : named.type = named
object >< {
def apply(
that: ShapeAPI
): ^[OuterProduct[_Shape, that._Shape]] = {
OuterProduct(shape, that.shape).^
}
// TODO: redundant?
// def apply[THAT <: Shape](
// that: ^[THAT]
// ): ^[OuterProduct[_Shape, that._Shape]] = {
//
// OuterProduct(shape, that.shape).^
// }
}
def outer: ><.type = ><
def einSum: EinSumOps[_Shape] = EinSumOps(shape)
def contract[N <: Names](names: N): ^[Reorder[CheckEinSum[_Shape], N]] = {
einSum.-->(names)
}
object Sub {
def apply[T <: Index](v: T): ^[GetSubscript[_Shape, T]] = {
GetSubscript(shape, v).^
}
def apply(i: Nat)(
implicit
toIntN: ToInt[i.N]
): ^[GetSubscript[_Shape, Index.I_th[i.N]]] = {
apply(Index.I_th(i))
}
def apply(w: Witness.Lt[String]): ^[GetSubscript[_Shape, Index.Name[w.T]]] = {
apply(Index.Name(w))
}
}
def flattenWith(
infix: ArityOps.Infix,
that: ShapeAPI
): ^[infix._SquashByName.On[OuterProduct[_Shape, that._Shape]]] = {
val outerP = ><(that)
infix._SquashByName.On(outerP).^
}
def flatten(
infix: ArityOps.Infix
): ^[infix._SquashByName.On[_Shape]] = {
infix._SquashByName.On(this).^
}
def transposeWith[N <: Names](names: N): ^[Reorder[CheckDistinct[_Shape], N]] = {
val distinct = CheckDistinct(shape)
val result = Reorder(distinct, names)
result.^
}
object transpose extends SingletonProductArgs {
def applyProduct[H1 <: HList, H2 <: HList](
v: H1
)(
implicit
reverse: Reverse.Aux[H1, H2],
lemma: Names.FromLiterals.Case[H2]
): ^[Reorder[CheckDistinct[_Shape], lemma.Out]] = {
val out = lemma.apply(reverse(v))
transposeWith(out)
}
}
def dimensionWise(
infix: ArityOps.Infix,
that: ShapeAPI
): ^[infix._DimensionWise.On[_Shape, that._Shape]] = {
infix._DimensionWise.On(this, that).^
}
def requireEqual(
that: ShapeAPI
): ^[ArityOps.==!._DimensionWise.On[_Shape, that._Shape]] = dimensionWise(ArityOps.==!, that)
}
object ShapeAPI {
type Aux[T] = ShapeAPI { type _Shape = T }
implicit def unbox[S <: Shape](v: Aux[S]): S = v.shape
implicit def fromIntS[T <: Int with Singleton](v: T)(
implicit
toW: Witness.Aux[T]
): ^[LeafShape.Eye ><^ Const.Literal[T]] = {
^(Shape >|< Arity(toW))
}
implicit def asLeaf[T <: LeafShape](v: Aux[T]): LeafOps[T] = LeafOps(v.shape)
case class ^[SELF <: Shape](shape: SELF) extends ShapeAPI {
final type _Shape = SELF
}
// TODO: only supports LeafShape; remove
// object Vector {
//
// type Aux[T <: Axis] = ^[➊ >< T]
// }
// type Vector = Vector.Aux[_]
//
// object Matrix {
//
// type Aux[T1 <: Axis, T2 <: Axis] = ^[➊ >< T1 >< T2]
// }
// type Matrix = Matrix.Aux[_ <: Axis, _ <: Axis]
}
|
ithinkicancode/shapesafe | macro/src/main/scala/org/shapesafe/m/viz/ExpressionVizCT.scala | <filename>macro/src/main/scala/org/shapesafe/m/viz/ExpressionVizCT.scala
package org.shapesafe.m.viz
import org.shapesafe.graph.commons.util.reflect.format.{FormatOvrd, FormatProtos, Formats}
import org.shapesafe.graph.commons.util.viz.TypeVizFormat
import scala.language.experimental.macros
case object ExpressionVizCT extends VizCTSystem {
override def format: TypeVizFormat = FormatProtos.TransformText(
FormatProtos.Trials(
FormatOvrd.Only,
Formats.TypeInfo.DeAlias.HideStatic.recursively
)
)
override def useTree: Boolean = true
implicit def infoOf[I]: InfoOf[I] = macro VizCTSystem.Macros.infoOf[I, this.type]
case object NoTree extends Updated {
override def useTree: Boolean = false
implicit def infoOf[I]: InfoOf[I] = macro VizCTSystem.Macros.infoOf[I, this.type]
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/unary/|<<-.scala | <reponame>ithinkicancode/shapesafe
package org.shapesafe.core.shape.unary
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{DebugUtil, Expressions, OpStrs, Reporters}
import org.shapesafe.core.shape.ProveShape._
import org.shapesafe.core.shape.{LeafShape, Names, Shape}
import org.shapesafe.m.viz.VizCTSystem.EmitError
import shapeless.HList
import shapeless.ops.hlist.ZipWithKeys
case class |<<-[
S1 <: Shape,
N <: Names
](
s1: S1 with Shape,
newNames: N
) extends Conjecture1.^[S1] {
override type _AsOpStr = OpStrs.Infix[S1, " |<<- ", N]
override type _AsExpr = Expressions.|<<-[Expr[S1], Expr[N]]
override type _Refute = "Dimension mismatch"
}
trait NamedWith_Imp0 {
import org.shapesafe.core.shape.ProveShape.Factory._
implicit def refute[
S1 <: Shape,
P1 <: LeafShape,
N <: Names,
MSG
](
implicit
lemma: S1 |- P1,
refute0: Reporters.ForShape.Refute0[|<<-[P1, N], MSG],
msg: EmitError[MSG]
): |<<-[S1, N] =>> LeafShape = {
???
}
}
object |<<- extends NamedWith_Imp0 {
import org.shapesafe.core.shape.ProveShape.Factory._
implicit def simplify[
S1 <: Shape,
P1 <: LeafShape,
N <: Names,
HO <: HList
](
implicit
lemma: S1 |- P1,
zip: ZipWithKeys.Aux[N#Static, P1#_Dimensions#Static, HO],
// zip2: ErrorIfNotFound[
// ZipWithKeys.Aux[N#Static, P1#_Dimensions#Static, HO],
// "ABC"
// // Refute0[|<<-[P1, N]]
// ],
// TODO: why can't this work?
toShape: LeafShape.FromRecord.Case[HO]
): |<<-[S1, N] =>> toShape.Out = {
forAll[|<<-[S1, N]].=>> { src =>
val keys: N#Static = src.newNames.static
val p1: P1 = lemma.valueOf(src.s1)
val values: P1#_Dimensions#Static = p1.dimensions.static
val zipped: HO = values.zipWithKeys(keys)(zip)
LeafShape.FromRecord(zipped)
}
}
// TODO: INFINITE LOOP!
// implicit def axiom[
// P1 <: LeafShape,
// N <: Names,
// HO <: HList,
// O <: LeafShape
// ](
// implicit
// zip: ZipWithKeys.Aux[N#Keys, P1#_Dimensions#Static, HO],
// toShape: LeafShape.FromRecord.==>[HO, O]
// ): WithNames[P1, N] =>> O = {
// from[WithNames[P1, N]].=>> { src =>
// val keys: N#Keys = src.newNames.keys
// val p1: P1 = src.s1
//
// val values: P1#_Dimensions#Static = p1.dimensions.static
//
// val zipped: HO = values.zipWithKeys(keys)
// LeafShape.FromRecord(zipped)
// }
// }
//
// implicit def theorem[
// S1 <: Shape,
// N <: Names,
// P1 <: LeafShape,
// O <: LeafShape
// ](
// implicit
// lemma1: S1 ~~> P1,
// lemma2: WithNames[P1, N] --> O
// ): WithNames[S1, N] =>> O = {
// from[WithNames[S1, N]].=>> { src =>
// val p1: P1 = lemma1.valueOf(src.s1)
//
// lemma2.valueOf(
// src.copy(p1)
// )
// }
// }
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/Indices.scala | package org.shapesafe.core.shape
import org.shapesafe.core.tuple.{CanInfix_><, StaticTuples, TupleSystem}
import scala.language.implicitConversions
trait Indices extends IndicesMagnet with Indices.proto.Tuple {
final override type AsIndices = this.type
final override def asIndices: Indices.this.type = this
}
object Indices extends TupleSystem with CanInfix_>< {
type UpperBound = Index
object proto extends StaticTuples.Total[UpperBound] with CanInfix_>< {}
type Tuple = Indices
class Eye extends proto.Eye with Indices
lazy val Eye = new Eye
class ><[
TAIL <: Indices,
HEAD <: UpperBound
](
override val tail: TAIL,
override val head: HEAD
) extends proto.><[TAIL, HEAD](tail, head)
with Tuple {
override type PeekHead = Head
}
implicit def consAlways[TAIL <: Tuple, HEAD <: UpperBound]: Cons.FromFn2[TAIL, HEAD, TAIL >< HEAD] = {
Cons.from[TAIL, HEAD].to { (tail, head) =>
new ><(tail, head)
}
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/debugging/DebugSymbol.scala | package org.shapesafe.core.debugging
trait DebugSymbol {
type _AsOpStr
}
object DebugSymbol {
trait On1 extends DebugSymbol {
trait On[A] {}
}
trait On2 extends DebugSymbol {
trait On[A, B] {}
}
trait Require extends DebugSymbol {
type Complement <: Require
// implicitly[this.type <:< Complement#Complement]
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/ProofSystem.scala | <reponame>ithinkicancode/shapesafe<gh_stars>1-10
package org.shapesafe.core
import scala.language.implicitConversions
/**
* This trait forms the backbone of compile-time reasoning and should reflect our best effort at reproducing
* the Curry-Howard isomorphism with the Scala compiler (regardless of how ill-suited it is); expect drastic changes
* in case the implicit search algorithm is ever improved
* @tparam _OUB upper bound of output
*/
// TODO: If Poly1 works smoothly it could totally supersede this class; too bad the assumed compiler bug made it necessary
trait ProofSystem[_OUB] extends Propositional[_OUB] with ProofScope { // TODO: no IUB?
type OUB = _OUB
final val root: this.type = this
trait Proof[-I, +P <: Consequent] {
def apply(v: I): P
final def valueOf(v: I): P#Domain = apply(v).value
}
def forAll[I]: Factory[I] = new Factory[I] {}
final def forValue[I](v: I): Factory[I] = forAll[I]
object Factory {
trait =>>^^[-I, +P <: Consequent] extends Proof[I, P]
/**
* Logical implication: If I is true then P is definitely true (or: NOT(I) \/ P = true)
* NOT material implication! If I can be immediately refuted then it implies NOTHING! Not even itself.
*
* In fact, any [[Arity]] or [[Shape]] that cannot be refuted at compile-time should subclass [[VerifiedArity]]
* or [[VerifiedShape]], each of which implies itself
*
* The programmer must ensure that no implicit subclass is defined for immediately refutable conjectures
*
* the symbol =>> is there to stress that it represents 2 morphisms:
*
* - value v --> value apply(v)
*
* - domain I --> domain O
* @tparam I src type
* @tparam O tgt type
*/
trait =>>[-I, O <: OUB] extends =>>^^[I, root.Term.^[O]] {}
}
trait Factory[I] {
import Factory._
def =>>^^[P <: Consequent](_fn: I => P): I =>>^^ P = new (I =>>^^ P) {
override def apply(v: I): P = _fn(v)
}
def =>>[O <: OUB](_fn: I => O): I =>> O = new (I =>> O) {
// override def valueOf(v: I): O = _fn(v)
override def apply(v: I): root.Term.^[O] = root.Term.^[O](_fn(v))
}
def summon[O <: OUB](
implicit
prove: I |- O
): I |- O = prove
def to[O <: OUB] = new To[O]
class To[OB <: OUB] {
def summon[O <: OB](
implicit
prove: I |- O
): I |- O = prove
}
}
class SubScope extends ProofScope.ChildScope(this)
}
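// Minimal usage sketch (hypothetical `Plus` conjecture; cf. ProveShape/ProveArity for the
// real instantiations): an implicit `I =>> O` witnesses that conjecture I simplifies to O,
// and is built through the Factory:
//
//   object ProveInt extends ProofSystem[Int] {}
//   import ProveInt._, ProveInt.Factory._
//   case class Plus(a: Int, b: Int)
//   implicit val plusProof: Plus =>> Int =
//     forAll[Plus].=>> { v => v.a + v.b }   // both a value morphism and a domain morphism
//   // `Plus |- Int` can now be summoned, and its valueOf(Plus(1, 2)) == 3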
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/APISystem.scala | package org.shapesafe.core
import scala.language.implicitConversions
/**
* A class each instance of which contains an inner dependent type and an instance of that type; generally used for:
*
* - functions where generic type parameters are impossible or too verbose to define
* (e.g. the native implicit scope of a generic type parameter cannot be included automatically)
*
* - instances whose peer type needs to be used
*
* defined to circumvent the lack of a "peer type" or "tightest non-singleton self-type" in Scala.
*
* using the singleton type `this.type` directly is incompatible with invariant type parameters and causes fuzzy error messages
*
* a whitebox macro may be helpful (see NonSingletonTUB in the macro package), but I'm skeptical: it makes the Dotty upgrade much harder
*
* other alternatives are F-bounded polymorphism and type classes, both of which are too verbose or not powerful enough
*/
trait APISystem {
type Bound
// TODO: this is not used at the moment; ArityAPI may extend ShapeAPI, which causes a diamond inheritance problem on the same member
trait API {
type Inner <: Bound
def inner: Inner
}
object API {
type Aux[I <: Bound] = API { type Inner = I }
}
implicit final def unbox[T <: API](v: T): v.Inner = v.inner
// implicit final def box[T <: API](v: T): API.Aux[Bound] = create(v)
def create[I <: Bound](internal: I): API.Aux[I]
trait APICompanion {}
}
object APISystem {}
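// Hypothetical concrete instance (a sketch; `IntBox` is not part of this repo):
//
//   object IntBox extends APISystem {
//     type Bound = Int
//     def create[I <: Int](internal: I): API.Aux[I] =
//       new API {
//         type Inner = I
//         val inner: I = internal
//       }
//   }
//   // `unbox` then recovers the precise Inner type from any API value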
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/ProveShape.scala | package org.shapesafe.core.shape
import org.shapesafe.core.ProofSystem
object ProveShape extends ProofSystem[Shape] {}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/unary/GetSubscript.scala | <reponame>ithinkicancode/shapesafe<filename>core/src/main/scala/org/shapesafe/core/shape/unary/GetSubscript.scala
package org.shapesafe.core.shape.unary
import org.shapesafe.core.Poly1Base
import org.shapesafe.core.arity.Arity
import org.shapesafe.core.axis.Axis
import org.shapesafe.core.axis.Axis.:<<-
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{DebugUtil, Expressions, OpStrs, Reporters}
import org.shapesafe.core.shape.LeafShape.><
import org.shapesafe.core.shape._
import org.shapesafe.m.viz.VizCTSystem.EmitError
import shapeless.ops.hlist.At
import shapeless.ops.record.Selector
import shapeless.{Nat, Witness}
case class GetSubscript[ // last step of einsum, contract, transpose, etc.
S1 <: Shape,
I <: Index
](
s1: S1 with Shape,
index: I
) extends Conjecture1.^[S1] {
override type _AsOpStr = OpStrs.Infix[S1, " GetSubscript ", I]
override type _AsExpr = Expressions.GetSubscript[Expr[S1], Expr[I]]
override type _Refute = "Index not found"
}
trait GetSubscript_Imp0 {
import ProveShape._
import Factory._
implicit def refute[
S1 <: Shape,
P1 <: LeafShape,
I <: Index,
MSG
](
implicit
lemma1: S1 |- P1,
refute0: Reporters.ForShape.Refute0[GetSubscript[P1, I], MSG],
msg: EmitError[MSG]
): GetSubscript[S1, I] =>> LeafShape = {
???
}
}
object GetSubscript extends GetSubscript_Imp0 {
import ProveShape._
import Factory._
implicit def simplify[
S1 <: Shape,
P1 <: LeafShape,
I <: Index,
O <: Axis
](
implicit
lemma1: S1 |- P1,
lemma2: Premise.==>[GetSubscript[P1, I], O]
): GetSubscript[S1, I] =>> (LeafShape.Eye >< O) = {
ProveShape.forAll[GetSubscript[S1, I]].=>> { v =>
val p1: P1 = lemma1.valueOf(v.s1)
val vv: GetSubscript[P1, I] = v.copy(s1 = p1)
Shape appendInner lemma2(vv)
}
}
object Premise extends Poly1Base[GetSubscript[_, _], Axis] {
implicit def byName[
P1 <: LeafShape,
N <: String,
A <: Arity
](
implicit
_selector: Selector.Aux[P1#Record, N, A]
): GetSubscript[P1, Index.Name[N]] ==> (A :<<- N) = {
forAll[GetSubscript[P1, Index.Name[N]]].==> { v =>
val p1: P1 = v.s1
val arity: A = _selector(p1.record)
val w: Witness.Aux[N] = v.index.w
(arity.^ :<<- w)
}
}
implicit def byII[
P1 <: LeafShape,
N <: Nat,
O <: Axis
](
implicit
_at: At.Aux[P1#Static, N, O]
): GetSubscript[P1, Index.I_th[N]] ==> O = {
forAll[GetSubscript[P1, Index.I_th[N]]].==> { v =>
val p1 = v.s1
_at(p1.static)
}
}
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/args/ApplyNats.scala | <filename>core/src/main/scala/org/shapesafe/core/shape/args/ApplyNats.scala
package org.shapesafe.core.shape.args
import org.shapesafe.core.shape.LeafShape
import org.shapesafe.core.shape.ShapeAPI.^
import shapeless.ops.hlist.Reverse
import shapeless.{HList, NatProductArgs}
trait ApplyNats extends ApplyArgs with NatProductArgs {
// TODO: should the reverse be justified?
def applyNatProduct[H1 <: HList, H2 <: HList](
v: H1
)(
implicit
reverse: Reverse.Aux[H1, H2],
lemma: fromHList.Case[H2]
): Result[lemma.Out] = {
val out = lemma.apply(v.reverse)
toResult(out)
}
}
object ApplyNats {
trait ToShape extends ApplyNats {
type OUB = LeafShape
override val fromHList: LeafShape.FromNats.type = LeafShape.FromNats
override type Result[T <: OUB] = ^[T]
override def toResult[T <: OUB](v: T) = v.^
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/debugging/Reporters.scala | package org.shapesafe.core.debugging
import org.shapesafe.core.debugging.DebugUtil.{CanRefute, Refute, Stripe}
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.OpStrs.OpStr
import org.shapesafe.core.{Poly1Base, ProofScope}
import org.shapesafe.m.viz.ExpressionVizCT
import org.shapesafe.m.viz.VizCTSystem.{EmitError, EmitInfo}
import singleton.ops.{+, XString}
// TODO: this weird abuse of implicit priority is due to the fact that
// singleton-ops RequireMsg only caches the last message in the implicit search,
// so step 1 is isolated to avoid triggering RequireMsg prematurely
class Reporters[
PS <: ProofScope
](val scope: PS) {
import Reporters._
trait ExprProofReporter[IUB <: CanPeek, TGT <: scope.OUB with CanPeek] extends Reporter[IUB] {
import org.shapesafe.core.debugging.DebugUtil._
import scope._
trait Step1_Imp3 extends Poly1Base[Iub, XString] {
implicit def raw[A <: Iub, VA <: XString](
implicit
vizA: Expr2Str[Expr[A], VA],
mk: (CannotEval + VA + "\n") { type Out <: XString }
): A ==> mk.Out =
forAll[A].==>(_ => mk.value)
}
trait Step1_Imp2 extends Step1_Imp3 {
implicit def eval[
A <: Iub,
S <: TGT,
VA <: XString,
VS <: XString
](
implicit
lemma: A |- S,
vizA: Expr2Str[Expr[A], VA],
vizS: Expr2Str[Expr[S], VS],
mk: (PEEK.T + VS + EntailsLF + VA + "\n") { type Out <: XString }
): A ==> mk.Out =
forAll[A].==>(_ => mk.value)
}
trait Step1_Imp1 extends Step1_Imp2 {
implicit def alreadyTarget[
S <: TGT with Iub,
VS <: XString
](
implicit
vizS: Expr2Str[Expr[S], VS],
op: (PEEK.T + VS + "\n") { type Out <: XString }
): S ==> op.Out =
forAll[S].==>(_ => op.value)
}
override object Step1 extends Step1_Imp1
}
trait OpProofReporter[IUB <: CanPeek, TGT <: scope.OUB with CanPeek] extends Reporter[IUB] {
import org.shapesafe.core.debugging.DebugUtil._
import scope._
trait Step1_Imp3 extends Poly1Base[Iub, XString] {
implicit def raw[A <: Iub](
implicit
mk: (CannotEval + OpStr[A] + "\n") { type Out <: XString }
): A ==> mk.Out =
forAll[A].==>(_ => mk.value)
}
trait Step1_Imp2 extends Step1_Imp3 {
implicit def eval[
A <: Iub,
S <: TGT
](
implicit
lemma: A |- S,
mk: (PEEK.T + OpStr[S] + EntailsLF + OpStr[A] + "\n") { type Out <: XString }
): A ==> mk.Out =
forAll[A].==>(_ => mk.value)
}
trait Step1_Imp1 extends Step1_Imp2 {
implicit def alreadyTarget[S <: TGT with Iub](
implicit
mk: (PEEK.T + OpStr[S] + "\n") { type Out <: XString }
): S ==> mk.Out =
forAll[S].==>(_ => mk.value)
}
override object Step1 extends Step1_Imp1
}
trait PeekReporter[IUB <: CanPeek, TGT <: scope.OUB with CanPeek] extends ExprProofReporter[IUB, TGT] {
override type EmitMsg[T] = EmitInfo[T]
}
trait InterruptReporter[IUB <: CanPeek, TGT <: scope.OUB with CanPeek] extends ExprProofReporter[IUB, TGT] {
override type EmitMsg[T] = EmitError[T]
}
}
object Reporters {
type Expr2Str[I, O <: String] = ExpressionVizCT.NoTree.InfoOf.Aux[I, O]
trait Reporter[IUB] extends Poly1Base[IUB, Unit] {
type EmitMsg[T]
final type Iub = IUB
val Step1: Poly1Base[IUB, XString]
case class From[IN <: IUB](v: IN) {
def getReportMsg[
SS <: XString
](
implicit
step1: Step1.Case.Aux[IN, SS]
): SS = {
step1.apply(v)
}
}
implicit def attempt[
IN <: IUB,
SS <: XString
](
implicit
step1: Step1.Case.Aux[IN, SS],
step2: EmitMsg[SS]
): IN ==> Unit = forAll[IN].==> { _ =>
// val emit = new EmitMsg[SS, EmitMsg.Error]
// emit.emit
}
}
trait Refutes {
type TryStripe
// TODO: remove, obsolete design
// type Refute0[SELF <: CanPeek with CanRefute] =
// Refute[SELF] +
// TryStripe +
// OpStr[SELF]
type Refute0[SELF <: CanPeek with CanRefute, O] = Refute0.Case.Aux[SELF, O]
object Refute0 extends Poly1Base[CanPeek with CanRefute, Any] {
implicit def get[I <: _IUB, V <: String](
implicit
expr2Str: Reporters.Expr2Str[I#_AsExpr, V]
): I ==> (
Refute[I] +
TryStripe +
V
) = forAll[I].==> { _ =>
null
}
}
}
object ForArity extends Refutes {
type TryStripe = "\n\n" + Stripe["... when proving arity"]
// type Refute1[SELF <: CanPeek with CanRefute, C1] =
// Refute[SELF] +
// TryArity +
// OpStr[SELF] +
// FROM1.T +
// C1
//
// type Refute2[SELF <: CanPeek with CanRefute, C1, C2] =
// OpStr[SELF] +
// TryArity +
// Refute[SELF] +
// FROM2.T +
// C1 +
// "\n\n" +
// C2
}
object ForShape extends Refutes {
type TryStripe = "\n\n" + Stripe["... when proving shape"]
}
}
|
ithinkicancode/shapesafe | core/src/test/scala/org/shapesafe/core/shape/unary/ReorderSpec.scala | package org.shapesafe.core.shape.unary
import org.shapesafe.graph.commons.util.viz.TypeViz
import org.shapesafe.BaseSpec
import org.shapesafe.core.arity.{Arity, ArityAPI}
import org.shapesafe.core.shape.Index.Name
import org.shapesafe.core.shape.{Indices, Names, Shape}
import org.shapesafe.m.viz.TypeVizCT
class ReorderSpec extends BaseSpec {
val s1 = Shape >|<
(Arity(1) :<<- "x") >|<
Arity(2) :<<- "y" >|<
Arity(3) :<<- "z"
describe("Premise") {
it("eye") {
val ss = Reorder(s1, Indices.Eye)
val rr = Reorder.Premise.apply(ss)
typeInferShort(rr).shouldBe("LeafShape.Eye")
}
it("1") {
val ss = Reorder(s1, Indices >< Name("z"))
val rr = Reorder.Premise.apply(ss)
typeInferShort(rr).shouldBe(
"""LeafShape.Eye >< (Const.Literal[Int(3)] :<<- String("z"))"""
)
}
it("2") {
val ss = Reorder(s1, Indices >< Name("z") >< Name("y"))
val rr = Reorder.Premise.apply(ss)
val s2 = Shape >|<
Arity(3) :<<- "z" >|<
Arity(2) :<<- "y"
TypeViz.infer(s2.shape).should_=:=(TypeViz.infer(rr))
typeInferShort(rr).shouldBe(
"""
|LeafShape.Eye >< (Const.Literal[Int(3)] :<<- String("z")) >< (Const.Literal[Int(2)] :<<- String("y"))
|""".stripMargin
)
}
it("3") {
val ss = Reorder(s1, Indices >< Name("z") >< Name("y") >< Name("x"))
val rr = Reorder.Premise.apply(ss)
typeInferShort(rr).shouldBe(
"""
|LeafShape.Eye >< (Const.Literal[Int(3)] :<<- String("z")) >< (Const.Literal[Int(2)] :<<- String("y")) >< (Const.Literal[Int(1)] :<<- String("x"))
|""".stripMargin
)
}
}
it("with names") {
val r = Reorder(s1, Indices >< Name("z") >< Name("y")).^
r.eval.toString.shouldBe(
"""
|➊ ><
| 3:Literal :<<- z ><
| 2:Literal :<<- y
|""".stripMargin
)
}
it(" ... alternatively") {
val r = Reorder(s1, Names >< "z" >< "y").^
r.eval.toString.shouldBe(
"""
|➊ ><
| 3:Literal :<<- z ><
| 2:Literal :<<- y
|""".stripMargin
)
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/binary/UncheckedDomain_Imp0.scala | package org.shapesafe.core.arity.binary
import org.shapesafe.core.arity.Arity
import org.shapesafe.core.arity.Unchecked
import org.shapesafe.core.arity.ProveArity._
import scala.language.existentials
trait UncheckedDomain_Imp0 {
case class D2[
A1 <: Arity,
A2 <: Arity,
TC <: Arity
]()(
implicit
val bound1: A1 |-< Unchecked,
val bound2: A2 |-< TC
) extends UncheckedDomain[A1, A2] {
final type O1 = Unchecked
final type Tightest = TC
override def selectTightest(a1: A1, a2: A2): Tightest = bound2.valueOf(a2)
}
implicit def d2[
A1 <: Arity,
A2 <: Arity,
TC <: Arity
](
implicit
bound1: A1 |-< Unchecked,
bound2: A2 |-< TC
): D2[A1, A2, TC] = D2()
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/axis/AxisLike.scala | package org.shapesafe.core.axis
import org.shapesafe.core.arity.ArityAPI
import org.shapesafe.core.arity.ops.HasArity
import org.shapesafe.core.axis.Axis.:<<-
import shapeless.Witness
trait AxisLike extends HasArity {
val nameSingleton: Witness.Lt[String]
final type Name = nameSingleton.T
final def name: Name = nameSingleton.value
def nameless: ArityAPI.^[_Arity] = arity.^
def namedT[S <: String](
implicit
name: Witness.Aux[S]
): _Arity :<<- S = Axis(nameless, name)
def named(name: Witness.Lt[String]): _Arity :<<- name.T = {
namedT(name)
}
def :<<-(name: Witness.Lt[String]): _Arity :<<- name.T = namedT(name)
}
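// Usage sketch (grounded in ReorderSpec): any arity can be named with :<<- to form an
// Axis, the name being captured as a singleton type via its Witness:
//
//   val x = Arity(1) :<<- "x"   // : Const.Literal[Int(1)] :<<- String("x")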
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/unary/ContractByName.scala | package org.shapesafe.core.shape.unary
import org.shapesafe.core.arity.ops
import org.shapesafe.core.arity.ops.ArityOps
import org.shapesafe.core.shape.Shape
import scala.language.implicitConversions
// TODO: useless, EinSum handles everything, remove?
case class ContractByName[
S1 <: Shape
](
override val s1: S1 with Shape
) extends ContractByName.op._On[S1] {}
object ContractByName {
val op: ops.ArityOps.==!._SquashByName.type = ArityOps.==!._SquashByName
def indexing: op._Indexing.type = op._Indexing
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/axis/RecordUpdater.scala | <reponame>ithinkicancode/shapesafe
package org.shapesafe.core.axis
import org.shapesafe.core.Poly1Base
import org.shapesafe.core.axis.Axis.UB_->>
import shapeless.HList
trait RecordUpdater extends Poly1Base[(HList, UB_->>), HList] {}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/axis/OldNameUpdaterSystem.scala | package org.shapesafe.core.axis
import org.shapesafe.core.arity.binary.Op2Like
import org.shapesafe.core.arity.{Arity, LeafArity}
import org.shapesafe.core.axis.Axis.->>
import shapeless.ops.record.{Modifier, Selector}
import shapeless.syntax.RecordOps
import shapeless.{::, HList, Witness}
class OldNameUpdaterSystem[OP <: Op2Like](val op: OP) {
trait Appender extends RecordUpdater {
// TODO: should be a Poly2?
// TODO: merge into EinSumIndexed.Cons
import org.shapesafe.core.arity.ProveArity._
implicit def ifOldName[
OLD <: HList,
N <: String,
A1 <: Arity,
A2 <: Arity,
O <: LeafArity
](
implicit
name: Witness.Aux[N],
selector: Selector.Aux[OLD, N, A1],
lemma: op.On[A1, A2] |- O
): (OLD, N ->> A2) ==> ((N ->> O) :: OLD) = {
forAll[(OLD, N ->> A2)].==> {
case (old, field) =>
import shapeless.record._
val d1 = old.apply(name)
val d2 = field: A2
val oped = op.on(d1.^, d2.^)
val d_new: O = lemma.apply(oped).value
d_new.asInstanceOf[N ->> O] :: old
}
}
}
// object Appender extends Appender
trait Squasher extends RecordUpdater {
// TODO: should be a Poly2?
// TODO: merge into EinSumIndexed.Cons
import org.shapesafe.core.arity.ProveArity._
implicit def ifOldName[
OLD <: HList,
N <: String,
A1 <: Arity,
A2 <: Arity,
O <: LeafArity
](
implicit
name: Witness.Aux[N],
selector: Selector.Aux[OLD, N, A1],
lemma: op.On[A1, A2] |- O,
modifier: Modifier[OLD, N, A1, O]
): (OLD, N ->> A2) ==> modifier.Out = {
forAll[(OLD, N ->> A2)].==> {
case (old, field) =>
val oldView: RecordOps[OLD] = new RecordOps(old)
val d1 = oldView.apply(name): A1
val d2 = field: A2
val d_new: O = lemma.apply(op.on(d1.^, d2.^)).value
val result = modifier.apply(old, _ => d_new)
result
}
}
}
// object Squasher extends Squasher
}
object OldNameUpdaterSystem {}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/unary/CheckDistinct.scala | package org.shapesafe.core.shape.unary
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{Expressions, OpStrs, Reporters}
import org.shapesafe.core.shape.{LeafShape, ProveShape, Shape}
import org.shapesafe.m.viz.VizCTSystem.EmitError
// all names must be distinct - no duplication allowed
case class CheckDistinct[
S1 <: Shape
](
s1: S1 with Shape
) extends Conjecture1.^[S1] {
override type _AsOpStr = OpStrs.PrefixW1["Distinct", S1]
override type _AsExpr = Expressions.CheckDistinct[Expr[S1]]
override type _Refute = "Names has duplicates"
}
trait CheckDistinct_Imp0 {
import ProveShape.Factory._
import ProveShape._
implicit def refute[
S1 <: Shape,
P1 <: LeafShape,
MSG
](
implicit
lemma: S1 |- P1,
refute0: Reporters.ForShape.Refute0[CheckDistinct[P1], MSG],
msg: EmitError[MSG]
): CheckDistinct[S1] =>> LeafShape = {
null
}
}
object CheckDistinct extends CheckDistinct_Imp0 {
import ProveShape.Factory._
import ProveShape._
implicit def simplify[
S1 <: Shape,
P1 <: LeafShape
](
implicit
lemma: S1 |- P1,
indexing: _Indexing.Case[P1#Record]
): CheckDistinct[S1] =>> P1 = {
ProveShape.forAll[CheckDistinct[S1]].=>> { v =>
lemma.valueOf(v.s1)
}
}
object _Indexing extends UnaryIndexingFn.Distinct
type _Indexing = _Indexing.type
}
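// Illustration (sketch): CheckDistinct proves to its input unchanged when all axis names
// are distinct, and falls into the refute case (emitting "Names have duplicates") otherwise:
//
//   CheckDistinct(Shape >|< Arity(1) :<<- "x" >|< Arity(2) :<<- "y")   // provable
//   CheckDistinct(Shape >|< Arity(1) :<<- "x" >|< Arity(2) :<<- "x")   // refuted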
|
ithinkicancode/shapesafe | macro/src/test/scala/org/shapesafe/m/EmitMsgSpec.scala | <reponame>ithinkicancode/shapesafe
package org.shapesafe.m
import org.shapesafe.graph.commons.testlib.BaseSpec
import singleton.ops.ToString
class EmitMsgSpec extends BaseSpec {
import EmitMsgSpec._
it("weakly") {
shouldNotCompile(
"""emitError.weakly["ABC"]""",
"ABC"
)
type RR = "ABC"
type TT = EmitMsg[RR, EmitMsg.Error]
// shouldNotCompile(
// """implicitly[TT]""",
// "ABC"
// )
}
it("byOp") {
type T = ToString["ABC"]
// val op = implicitly[ToString["ABC"]]
shouldNotCompile(
"""emitError.byOp[ToString["ABC"]]""",
"ABC"
)
shouldNotCompile(
"""emitError.byOp[T]""",
"ABC"
)
// val v = doOp[ToString["ABC"]]
// shouldNotCompile(
// """run[ToString["ABC"]]""",
// "ABC"
// )
}
// it("byTypeable") {
//
// type T = ToString["ABC"]
//
// shouldNotCompile(
// """emitError.byTypeable[ToString["ABC"]]""",
// "ABC"
// )
//
// shouldNotCompile(
// """emitError.byTypeable[T]""",
// "ABC"
// )
//
// def run[T](
// implicit
// op: Typeable[T]
// ): EmitMsg[T, EmitMsg.Error] = {
// emitError.byTypeable[T](op)
// }
//
// val v = run[ToString["ABC"]]
//
//// shouldNotCompile(
//// """run[ToString["ABC"]]""",
//// "ABC"
//// )
// }
it("can emit warning") { // TODO: how to de-verbose?
emitWarning.weakly["ABC"]
type TT = EmitMsg["ABC", EmitMsg.Warning]
implicitly[TT] //(EmitMsg.emit)
}
it("can emit info") {
emitInfo.weakly["ABC"]
type TT = EmitMsg["ABC", EmitMsg.Info]
implicitly[TT] //(EmitMsg.emit)
}
}
object EmitMsgSpec {
val emitError: EmitMsg.Level[EmitMsg.Error] = EmitMsg[EmitMsg.Error]
val emitWarning: EmitMsg.Level[EmitMsg.Warning] = EmitMsg[EmitMsg.Warning]
val emitInfo: EmitMsg.Level[EmitMsg.Info] = EmitMsg[EmitMsg.Info]
// val op = implicitly[ToString["ABC"]]
// TODO: blocked by inline feature: https://stackoverflow.com/questions/67526001/in-scala-can-a-function-be-defined-to-have-pass-by-ast-parameter-such-that-the
// def doOp[T <: Op](
// implicit
// op: T
// ): EmitMsg[T, EmitMsg.Error] = {
// emitError.byOp[T](op)
// }
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/binary/DimensionWise.scala | package org.shapesafe.core.shape.binary
import org.shapesafe.graph.commons.util.HasOuter
import org.shapesafe.core.arity.Arity
import org.shapesafe.core.arity.binary.Op2Like
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{DebugSymbol, Expressions, OpStrs}
import org.shapesafe.core.shape.unary.UnaryIndexingFn
import org.shapesafe.core.shape.{LeafShape, ProveShape, Shape}
import shapeless.ops.hlist.Zip
import shapeless.{::, HList, HNil}
trait DimensionWise {
val op: Op2Like
type _Binary <: DebugSymbol.On2
// all names must be distinct - no duplication allowed
trait _On[
S1 <: Shape,
S2 <: Shape
] extends Conjecture2.^[S1, S2]
with HasOuter {
override def outer: DimensionWise.this.type = DimensionWise.this
def s1: S1 with Shape
def s2: S2 with Shape
override type _AsOpStr = OpStrs.PrefixW2[_Binary#_AsOpStr, S1, S2]
override type _AsExpr = _Binary#On[Expr[S1], Expr[S2]]
}
object _On {
import ProveShape.Factory._
import ProveShape.|-
implicit def simplify[
S1 <: Shape,
S2 <: Shape,
P1 <: LeafShape,
P2 <: LeafShape,
HO <: HList
](
implicit
lemma1: S1 |- P1,
lemma2: S2 |- P2,
zip: Zip.Aux[P1#_Dimensions#Static :: P2#_Dimensions#Static :: HNil, HO],
toShape: _Indexing.ToShape.Case[HO]
): _On[S1, S2] =>> toShape.Out = {
ProveShape.forAll[_On[S1, S2]].=>> { v =>
val p1 = lemma1.valueOf(v.s1)
val p2 = lemma2.valueOf(v.s2)
val zipped = zip.apply(p1.dimensions.static :: p2.dimensions.static :: HNil)
val result = toShape.apply(zipped)
result
}
}
}
case class On[
S1 <: Shape,
S2 <: Shape
](
override val s1: S1 with Shape,
override val s2: S2 with Shape
) extends _On[S1, S2] {}
// TODO: not sure if it is too convoluted; should it extend BinaryIndexingFn?
object _Indexing extends UnaryIndexingFn {
import org.shapesafe.core.arity.ProveArity.|-
implicit def cons[
TI <: HList,
TO <: HList,
A1 <: Arity,
A2 <: Arity,
AO <: Arity
](
implicit
consTail: TI ==> TO,
proveArity: op.On[A1, A2] |- AO
): ((A1, A2) :: TI) ==> (AO :: TO) = {
forAll[(A1, A2) :: TI].==> { v =>
val ti = v.tail
val to = consTail(ti)
val hi = v.head
val ho = proveArity.valueOf(op.on(hi._1.^, hi._2.^))
ho :: to
}
}
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/Propositional.scala | package org.shapesafe.core
import scala.language.implicitConversions
trait Propositional[OUB] {
trait Term extends Serializable {
type Domain <: OUB
def value: Domain
}
case object Term {
type Aux[O <: OUB] = Term {
type Domain = O
}
type Lt[+O <: OUB] = Term {
type Domain <: O
}
trait Of[O <: OUB] extends Term {
final type Domain = O
}
case class ^[O <: OUB](value: O) extends Of[O]
}
}
//object Propositional {
//
// trait PropositionBase extends Serializable {
//
// type Codomain
// def value: Codomain
// }
//}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/debugging/OpStrs.scala | package org.shapesafe.core.debugging
import org.shapesafe.core.debugging.DebugUtil._
import singleton.ops.+
object OpStrs {
type OpStr[T <: CanPeek] = StrOrRaw[
T#_AsOpStr
]
// TODO: brackets?
type Infix[T1 <: CanPeek, S, T2 <: CanPeek] =
OpStr[T1] + StrOrRaw[S] + OpStr[T2]
type PrefixW1[S, T <: CanPeek] =
StrOrRaw[S] + Br[OpStr[T]]
type PrefixW2[S, T1 <: CanPeek, T2 <: CanPeek] =
StrOrRaw[S] + Br[OpStr[T1] + ", " + OpStr[T2]]
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/util/Constraint.scala | package org.shapesafe.core.util
import shapeless.HList
import shapeless.ops.hlist
import scala.language.implicitConversions
trait Constraint {}
object Constraint {
import org.shapesafe.graph.commons.util.reflect.ScalaReflection._
// TODO: why reinvent the wheel? shapeless LUBConstraint is the same
case class ElementOfType[Data <: HList, Element: TypeTag]() extends Constraint {
val ttg: TypeTag[Element] = universe.typeTag[Element]
}
object ElementOfType {
// TODO: remove, not efficient
// implicit def observe0[T: TypeTag]: OfMemberType[HNil, T] = OfMemberType[HNil, T]()
//
// implicit def observeN[T: TypeTag, Prev <: HList](
// implicit assuming: OfMemberType[Prev, T]): OfMemberType[T :: Prev, T] =
// OfMemberType[T :: Prev, T]()
// implicit def observe0[Element: TypeTag]: OfElementType[HNil, Element] = OfElementType[HNil, Element]()
implicit def observe[Data <: HList, Element: TypeTag](
implicit
assuming: hlist.ToArray[Data, Element]
): ElementOfType[Data, Element] =
ElementOfType[Data, Element]()
}
}
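// Usage sketch: the evidence is summoned implicitly and only resolves when every element
// of the HList conforms to Element (delegating to shapeless hlist.ToArray):
//
//   import shapeless.{::, HNil}
//   implicitly[Constraint.ElementOfType[Int :: Int :: HNil, Int]]          // resolves
//   // implicitly[Constraint.ElementOfType[Int :: String :: HNil, Int]]    // would not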
|
ithinkicancode/shapesafe | core/src/test/scala/org/shapesafe/core/RequireMsgSpike.scala | <reponame>ithinkicancode/shapesafe
package org.shapesafe.core
import singleton.ops.RequireMsg
object RequireMsgSpike {
trait HasM {
type M = Int
}
trait Foo[T]
object Foo {
implicit def ev[T <: HasM](
implicit
r: RequireMsg[false, "Bad Type: ${T#M}"]
): Foo[T] = ???
}
// implicitly[Foo[HasM]] //error: Bad Type:
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/axis/Axis.scala | <reponame>ithinkicancode/shapesafe
package org.shapesafe.core.axis
import org.shapesafe.graph.commons.util.IDMixin
import org.shapesafe.core.arity.{Arity, ArityAPI}
import org.shapesafe.core.debugging.Expressions.Expr
import org.shapesafe.core.debugging.{CanPeek, DebugUtil, Expressions, OpStrs}
import shapeless.Witness
import shapeless.labelled.FieldType
import scala.language.implicitConversions
trait Axis extends AxisLike with IDMixin with CanPeek {
// TODO: can be a subclass of shapeless KeyTag
final type Field = FieldType[Name, _Arity]
final def asField: Field = arity.asInstanceOf[Field]
type _Axis >: this.type <: Axis
final def axis: _Axis = this: _Axis
override protected lazy val _id: Any = (arity, name)
}
object Axis {
type ->>[N <: String, D] = FieldType[N, D]
type UB_->> = (_ <: String) ->> Arity
// TODO: N can be eliminated
final class :<<-[
A <: Arity,
N <: String
](
val arity: A,
val nameSingleton: Witness.Aux[N]
) extends Axis
// with KeyTag[N, D :<<- N]
// TODO: remove? FieldType[] has some black magic implemented in macros
{
type _Arity = A
type _Axis = _Arity :<<- Name
trait CanPeekName extends CanPeek {
override type _AsOpStr = Name
override type _AsExpr = Name
}
type _AsOpStr = DebugUtil.Br[OpStrs.Infix[A, " :<<- ", CanPeekName]]
override type _AsExpr = Expressions.:<<-[Expr[A], Expr[CanPeekName]]
override lazy val toString: String = {
if (name.isEmpty) s"$arity"
else s"$arity :<<- $name"
}
}
def apply(
value: ArityAPI,
name: Witness.Lt[String]
): :<<-[value._Arity, name.T] = {
new :<<-(value.arity, name)
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/Unprovable.scala | package org.shapesafe.core.arity
object Unprovable extends Arity {
override def runtimeArity: Int = throw new UnsupportedOperationException("cannot verify an Unprovable")
override type _AsExpr = "???"
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/ArityConjecture.scala | package org.shapesafe.core.arity
import org.shapesafe.graph.commons.util.ProductTree
import org.shapesafe.core.debugging.DebugUtil.CanRefute
trait ArityConjecture extends Arity.Verifiable with CanRefute with ProductTree {
// final override def toString: String = treeString
}
object ArityConjecture {}
|
ithinkicancode/shapesafe | macro/src/main/scala/org/shapesafe/m/viz/KindVizCT.scala | <gh_stars>1-10
package org.shapesafe.m.viz
import org.shapesafe.graph.commons.util.reflect.format.{EnableOvrd, Formats}
import org.shapesafe.graph.commons.util.viz.TypeVizFormat
import scala.language.experimental.macros
case object KindVizCT extends VizCTSystem {
override def format: TypeVizFormat = Formats.KindName.HidePackage.recursively
override def useTree: Boolean = true
implicit def infoOf[I]: InfoOf[I] = macro VizCTSystem.Macros.infoOf[I, this.type]
case object NoTree extends Updated {
override def useTree: Boolean = false
implicit def infoOf[I]: InfoOf[I] = macro VizCTSystem.Macros.infoOf[I, this.type]
}
case object Ovrd extends Updated {
override def format: TypeVizFormat = EnableOvrd(outer.typeFormat)
override def useTree: Boolean = false
implicit def infoOf[I]: InfoOf[I] = macro VizCTSystem.Macros.infoOf[I, this.type]
}
}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/tuple/CanInfix_><.scala | package org.shapesafe.core.tuple
import scala.language.implicitConversions
trait CanInfix_>< extends CanCons {
_self: TupleSystem =>
trait InfixMixin[SELF <: Tuple] {
def self: SELF
def ><[
HEAD <: UpperBound
](
head: HEAD
)(
implicit
cons: Cons[SELF, HEAD]
): cons.ConsResult = cons(self, head)
}
implicit class Infix[SELF <: Tuple](val self: SELF) extends InfixMixin[SELF] {}
implicit def toEyeInfix(s: this.type): Infix[Eye] = Infix[Eye](Eye)
}
object CanInfix_>< {}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/shape/ShapeConjecture.scala | package org.shapesafe.core.shape
import org.shapesafe.graph.commons.util.ProductTree
import org.shapesafe.core.debugging.DebugUtil.CanRefute
trait ShapeConjecture extends Shape with CanRefute with ProductTree {
final override def toString: String = treeString
}
object ShapeConjecture {}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/ProofAPI.scala | <filename>core/src/main/scala/org/shapesafe/core/ProofAPI.scala<gh_stars>1-10
//package org.shapesafe.core
//
//import org.shapesafe.core.arity.ArityAPI
//import org.shapesafe.core.debugging.{CanPeek, Reporters}
//
//trait ProofAPI {
//
// val scope: ProofScope
// import scope._
//
// final type VerifyTo = scope.OUB with CanPeek
// type EvalTo <: VerifyTo
//
// object _Reporters extends Reporters[scope.type](scope)
//
// object PeekReporter extends _Reporters.PeekReporter[VerifyTo, EvalTo]
// object InterruptReporter extends _Reporters.InterruptReporter[VerifyTo, EvalTo]
//
// trait APIProto {
//
// type _Out <: VerifyTo
// def out: _Out
//
// def verify[
// O <: VerifyTo
// ](
// implicit
// prove: _Out |- O
// ): ArityAPI.^[O] = prove.apply(out).value.^
//
// def eval[
// O <: EvalTo
// ](
// implicit
// prove: _Out |- O
// ): ArityAPI.^[O] = verify(prove)
//
// def peek(
// implicit
// reporter: PeekReporter.Case[_Out]
// ): this.type = this
//
// def interrupt(
// implicit
// reporter: InterruptReporter.Case[_Out]
// ): this.type = this
//
// def reason[
// O <: EvalTo
// ](
// implicit
// reporter: PeekReporter.Case[_Out],
// prove: _Out |- O
// ): ArityAPI.^[O] = verify(prove)
// }
//
// type Aux[O <: VerifyTo] = APIProto {}
//
// def create[O <: VerifyTo](v: O):
//}
|
ithinkicancode/shapesafe | core/src/main/scala/org/shapesafe/core/arity/Arity.scala | <reponame>ithinkicancode/shapesafe
package org.shapesafe.core.arity
import org.shapesafe.core.arity
import org.shapesafe.core.arity.ArityAPI.^
import org.shapesafe.core.arity.Const.{Derived, Literal}
import org.shapesafe.core.arity.Utils.NatAsOp
import org.shapesafe.core.debugging.CanPeek
import shapeless.{Nat, Witness}
import scala.language.implicitConversions
import scala.util.Try
trait Arity extends CanPeek {
def runtimeArity: Int
final lazy val runtimeTry: Try[Int] = Try(runtimeArity)
lazy val valueStr: String = runtimeTry
.map(_.toString)
.getOrElse("???")
// .recover {
// case ee: Exception =>
// ee.getMessage
// }
// .get
lazy val fullStr: String = {
valueStr + ":" + this.getClass.getSimpleName
}
final override def toString: String = fullStr
}
object Arity {
trait Verifiable extends Arity {}
val Unprovable: ^[arity.Unprovable.type] = arity.Unprovable.^
implicit class Converters[A <: Arity](self: A) {
def ^ : ^[A] = ArityAPI.^(self)
}
def apply(w: Witness.Lt[Int]): ^[Literal[w.T]] = {
^(Literal.apply(w))
}
lazy val _0 = Arity(0)
lazy val _1 = Arity(1)
lazy val _2 = Arity(2)
lazy val _3 = Arity(3)
object FromNat {
def apply[N <: Nat](v: N)(
implicit
ev: NatAsOp[N]
): ^[Derived[NatAsOp[N], ev.OutInt]] = {
^(Derived.summon[NatAsOp[N]](ev))
}
}
}
|