﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

using Framework;
using System.Threading;
using System.Net;
using System.Web.Script.Serialization;
using System.IO;
using SchemaMonitor.json;
using SchemaMonitor.json.dict;
using System.Web;
using System.Collections.Specialized;

namespace SchemaMonitor
{
    public static class CEngine
    {
        #region Download (string/dict/page)

        /// <summary>
        /// Downloads the content at <paramref name="url"/> as a string.
        /// On an HTTP error that carries a response body, rethrows with that body as
        /// the message so callers can see the server's error payload.
        /// </summary>
        /// <param name="url">Absolute URL to fetch.</param>
        /// <param name="wc">Client used for the request (owned by the caller).</param>
        /// <returns>The response body as a string.</returns>
        /// <exception cref="Exception">Wraps a WebException whose response carried a body.</exception>
        public static string DownloadString(string url, WebClient wc)
        {
            try
            {
                return wc.DownloadString(url);
            }
            catch (System.Net.WebException wex)
            {
                if (null == wex.Response)
                    throw;
                var rs = wex.Response.GetResponseStream();
                if (null == rs)
                    throw;

                // Fix: using ensures the reader/stream are released even when
                // ReadToEnd itself throws (previously they leaked on that path).
                string s;
                using (var sr = new StreamReader(rs))
                    s = sr.ReadToEnd();

                throw new Exception(s, wex);
            }
        }


        /// <summary>Downloads metadata for a node, using a fresh 30s-timeout client.</summary>
        public static CMetadata DownloadMeta(string fullId, CUser u) { return DownloadMeta(fullId, u, new CWebClient(30000)); }

        /// <summary>
        /// Downloads and parses the metadata document for <paramref name="fullId"/>.
        /// Best-effort: returns null when the download fails, the payload has no
        /// metadata section, or the metadata lacks a type or fields.
        /// </summary>
        public static CMetadata DownloadMeta(string fullId, CUser u, WebClient wc)
        {
            try
            {
                string url = u.UrlGetMetadata(fullId);
                var dict = DownloadDict(url, wc, new JavaScriptSerializer());

                if (dict.ContainsKey(CFieldList.METADATA))
                {
                    var meta = new CMetadata(dict.AsDict(CFieldList.METADATA), u, fullId);
                    if (meta.HasType && meta.HasFields)
                        return meta;
                }
            }
            catch (Exception)
            {
                // Fix: dropped the unused 'ex' local (compiler warning CS0168).
                // Deliberate best-effort: any failure means "no metadata available".
            }
            return null;
        }

        /// <summary>Downloads a JSON document and wraps it as a CDict, using a default serializer.</summary>
        public static CDict DownloadDict(string url, WebClient wc)
        {
            var serializer = new JavaScriptSerializer();
            return DownloadDict(url, wc, serializer);
        }

        /// <summary>Downloads the first page of data for a type, on behalf of a user.</summary>
        public static CPage DownloadPage(CUser u, CType t, WebClient wc, JavaScriptSerializer jss)
        {
            var pageUrl = t.UrlGetPage(u);
            return DownloadPage(pageUrl, wc, jss);
        }

        /// <summary>Downloads JSON from <paramref name="url"/> and deserialises it into a CDict.</summary>
        public static CDict DownloadDict(string url, WebClient wc, JavaScriptSerializer jss)
        {
            var payload = DownloadString(url, wc);
            return new CDict(jss.Deserialize<dynamic>(payload));
        }

        /// <summary>Downloads JSON from <paramref name="url"/> and wraps it as a paged result.</summary>
        public static CPage DownloadPage(string url, WebClient wc, JavaScriptSerializer jss)
        {
            return new CPage(DownloadDict(url, wc, jss));
        }
		#endregion

		#region "Post"
		/// <summary>
		/// POST body for album creation: serialised to JSON and sent to the API.
		/// NOTE(review): properties serialise as "Name"/"Message" (Pascal case),
		/// while the form-encoded path sends lower-case "name"/"message" — confirm
		/// which casing the endpoint accepts.
		/// </summary>
		public class CAlbum
		{
			public string Name { get; set; }
			public string Message { get; set; }
		}
		/// <summary>
		/// Creates an album for the given user via a form-encoded POST.
		/// </summary>
		/// <param name="userId">Owner of the new album.</param>
		/// <param name="userToken">Access token used to authorise the call.</param>
		/// <param name="albumName">Album title (required).</param>
		/// <param name="albumDesc">Optional album description.</param>
		/// <param name="wc">Optional client; a 30s-timeout client is created when null.</param>
		/// <param name="jss">Optional serializer; a default one is created when null.</param>
		/// <returns>The API response parsed into a CDict.</returns>
		public static CDict CreateAlbum(long userId, string userToken, string albumName, string albumDesc = null, WebClient wc = null, JavaScriptSerializer jss = null)
		{
			if (null == jss) jss = new JavaScriptSerializer();
			if (null == wc)  wc  = new CWebClient(30000);

			//Url
			var url = CSchema.FB.UrlCreateAlbum(userId, userToken);

			// Fix: the previous version first attempted a JSON-body POST (result
			// discarded, errors swallowed) and then unconditionally posted again
			// form-encoded — on a live endpoint that could create the album twice.
			// Only the form-encoded POST is kept.
			var nv = new NameValueCollection();
			nv.Add("name", albumName);
			if (null != albumDesc)
				nv.Add("message", albumDesc);

			return Upload(url, nv, wc, jss);
		}
		/// <summary>Form-posts <paramref name="data"/> and parses the JSON response into a CDict.</summary>
		public static CDict Upload(string url, NameValueCollection data, WebClient wc, JavaScriptSerializer jss)
		{
			var raw = Upload(url, data, wc);
			var text = CBinary.BytesToString(raw);
			return new CDict(jss.Deserialize<dynamic>(text));
		}

		/// <summary>Posts a raw string body and parses the JSON response into a CDict.</summary>
		public static CDict Upload(string url, string data, WebClient wc, JavaScriptSerializer jss)
		{
			var response = Upload(url, data, wc);
			return new CDict(jss.Deserialize<dynamic>(response));
		}

		/// <summary>Posts a raw string body; returns the response body as a string.</summary>
		public static string Upload(string url, string data, WebClient wc)
		{
			return wc.UploadString(url, "POST", data);
		}

		/// <summary>Form-posts name/value pairs; returns the raw response bytes.</summary>
		public static byte[] Upload(string url, NameValueCollection data, WebClient wc)
		{
			wc.Headers[HttpRequestHeader.ContentType] = "application/x-www-form-urlencoded";
			return wc.UploadValues(url, "POST", data);
		}
		#endregion


		#region Threading
		/// <summary>Schedules processing of the page on the thread pool.</summary>
		public static void ProcessInThread(CUserList page)
        {
            ThreadPool.QueueUserWorkItem(ProcessPageBegin, page);
        }

        /// <summary>WaitCallback adapter: unboxes the state object and processes the page.</summary>
        public static void ProcessPageBegin(object state)
        {
            var page = (CUserList)state;
            ProcessPage(page);
        }
        #endregion


        #region Interface
        /// <summary>Processes every user in the page with a fresh log.</summary>
        public static void ProcessPage(CUserList page) { ProcessPage(page, new CLog()); }

        /// <summary>
        /// Processes each user in turn; a failure for one user is logged and does
        /// not stop the rest of the page.
        /// </summary>
        public static void ProcessPage(CUserList page, CLog l)
        {
            foreach (CUser user in page)
            {
                try
                {
                    Process(user);
                    user.ClearData();
                }
                catch (Exception ex)
                {
                    l.Log(ex);
                }
            }
            l.Log("Processed Page: ", CUtilities.CountSummary(page, "user"));
        }
        /// <summary>
        /// Runs a full check for one user: refreshes basic details when stale, then
        /// looks for new data, recording progress and errors on a CCheck row.
        /// </summary>
        /// <returns>
        /// The saved CCheck, or null when the token is already known to be expired.
        /// </returns>
        public static CCheck Process(CUser u)
        {
            //Tools
            var l = new CLog(u);
            WebClient wc = new WebClient();
            JavaScriptSerializer jss = new JavaScriptSerializer();


            //Start the check
            CCheck c = new CCheck();
            c.User = u;
            c.CheckErrors = null;

            // Known-expired token: nothing to do (no check row is saved).
            if (u.UserTokenHasExpired)
                return null;

            //No user-token error
            if (string.IsNullOrEmpty(u.UserToken))
            {
                u.UserTokenHasExpired = true;
                u.Save();

                c.CheckErrors = "No User Token";
                c.Save();
                return c;
            }

            //Record the start
            c.Save();

            try
			{

				//StorePhotos(wc, u);
				//ExtendLifeOfToken(wc, u, c);

				//Sometimes update user (first run, or details older than a week)
				if (u.UserNodeGuid == Guid.Empty || u.UserUpdated.AddDays(7) < DateTime.Now)
                    GetBasicDetails(wc, jss, u);

                //Incremental 
                LookForNewData(c, wc, jss, u);


				//if (u.UserTokenExpires.AddHours(-1) < DateTime.Now) //Less than an hour remaining - attempt renewal
				//    ExtendLifeOfToken(wc, u, c);
			}
			catch (Exception ex)
            {
                // Heuristic: the API reports expired tokens via the message text.
                if (ex.Message.Contains("expired"))
                {
                    c.CheckErrors = "Expired Token";

                    u.UserTokenHasExpired = true;
                    u.Save();
                }
                else
                {
                    c.CheckErrors = ex.ToString();
                    l.Log(ex);
                }

                // Prefer the structured API error message when one can be parsed.
                try
                {
                    var e = new CDictError(ex);
                    c.CheckErrors = e.Message;
                }
                catch { }
            }
            finally
            {
                wc.Dispose();
            }
            c.CheckFinished = DateTime.Now;
            c.Save();
            return c;
        }
        #endregion

        #region StorePhotos
        /// <summary>Stores any of the user's photos that are not already in photo storage.</summary>
        private static void StorePhotos(WebClient wc, CUser u)
        {
            var storedIds = CPhoto.GetIds(u);
            StorePhotos(wc, u, storedIds);
        }

        /// <summary>
        /// Uploads photo nodes whose ids are missing from storage, then scans the
        /// user's posts for further photos to store.
        /// </summary>
        private static void StorePhotos(WebClient wc, CUser u, List<long> ids)
        {
            //Photo nodes
            var photoType = CSignature.Cache.GetByHash(new CSignature("photo"))[0].Type;
            foreach (var photo in u.All.GetByTypeId(photoType.TypeId))
            {
                if (!ids.Contains(photo.Root.Id1))
                    PutPhoto(wc, u, photo);
            }

            //Photos embedded in posts
            var postType = CSignature.Cache.GetByHash(new CSignature("post"))[0].Type;
            foreach (var post in u.All.GetByTypeId(postType.TypeId))
                StorePhotos(wc, u, ids, post);
        }

        /// <summary>
        /// Scans a post's "attachments" (and their "subattachments") for photos,
        /// storing new ones and recording their ids back onto the post when the
        /// recorded set actually changed.
        /// </summary>
        private static void StorePhotos(WebClient wc, CUser u, List<long> ids, CNode post)
        {
            var r = post.Root;
            var f = r.Fields;
            if (!f.Has("attachments"))
                return;

            // Ids already recorded against this post (null when none yet).
            var storedIds = r.PhotoIds;
            if (null == storedIds)
                storedIds = new List<long>();
            var before = CUtilities.ListToString(storedIds);

            var a = f["attachments"].AsCompositeList;
            foreach (var j in a)
            {
                var rr = j.Fields;
                if (!rr.Has("subattachments"))
                {
                    if (rr.Has("url") && rr.Has("target"))
                        SavePhoto(j, wc, u, post.NodeCreated, storedIds, ids);//post should include "images" => download the first one
                    continue;
                }
                var sa = rr["subattachments"].AsCompositeList;

                foreach (var k in sa)
                    SavePhoto(k, wc, u, post.NodeCreated, storedIds, ids);
            }
            // Only save when the recorded id list actually changed (string compare).
            var after = CUtilities.ListToString(storedIds);
            if (storedIds.Count > 0 && before != after)
            {
                r.PhotoIds = storedIds;
                post.Save();
                u.Log.Log("Stored ", storedIds.Count, " photos  for ", CUtilities.Truncate(r.ContentMessageOrName));
            }
        }


        /// <summary>
        /// Extracts the photo id and image from an attachment node, then stores the
        /// image unless already recorded on the post or present in global storage.
        /// </summary>
        private static void SavePhoto(CNodeBasic i, WebClient wc, CUser u, DateTime created, List<long> storedIds, List<long> allIds)
        {
            if (!i.Fields.Has("media"))
                return;
            var mediaNode = i.GetComposite("media");

            if (!mediaNode.Fields.Has("image"))
                return;
            var imageNode = mediaNode.GetComposite("image");

            if (!i.Fields.Has("target"))
                return;
            var targetNode = i.GetComposite("target");

            if (!targetNode.Fields.Has("id"))
                return;

            long photoId = long.Parse(targetNode.GetString("id"));
            if (storedIds.Contains(photoId))
                return; // already recorded against this post

            if (allIds.Contains(photoId))
            {
                // Stored globally but not yet recorded on the post.
                storedIds.Add(photoId);
                return;
            }

            // New photo: download + store, and record the id on success.
            if (PutPhoto(wc, u, imageNode.Fields, photoId, created))
                storedIds.Add(photoId);
        }
        /// <summary>Stores the photo held by a photo node (source url lives in its root fields).</summary>
        private static bool PutPhoto(WebClient wc, CUser u, CNode i)
        {
            var root = i.Root;
            return PutPhoto(wc, u, root.Fields, root.Id1, i.NodeCreated);
        }
        /// <summary>
        /// Downloads the image referenced by the fields' "source"/"src" url and
        /// stores it via CPhoto, including dimensions when both are present.
        /// </summary>
        /// <returns>true when the photo was downloaded and stored; false otherwise.</returns>
        private static bool PutPhoto(WebClient wc, CUser u, CFieldList ff, long id, DateTime created)
        {
            if (!ff.Has("source"))
                if (!ff.Has("src"))
                    return false;

            var url = ff.Has("source") ? ff["source"].AsString : ff["src"].AsString;

            try
            {
                var blob = wc.DownloadData(url);

                // Fix: previously only "width" was checked before reading "height",
                // which could throw when a payload carried width without height.
                if (ff.Has("width") && ff.Has("height"))
                {
                    var width = ff["width"].AsLong;
                    var height = ff["height"].AsLong;
                    CPhoto.PutPhoto(id, u.UserId, blob, created, (int)width, (int)height);
                }
                else
                    CPhoto.PutPhoto(id, u.UserId, blob, created);

                u.Log.Log("Uploaded photo #" , id , " " , CUtilities.FileSize(blob.Length));
                return true;
            }
            catch (Exception ex)
            {
                // Best-effort: a failed download/store is logged and reported as false.
                u.Log.Log(ex);
                return false;
            }
        }
		#endregion

		#region Update User
		/// <summary>
		/// Downloads the user's own profile node and merges changed fields into the
		/// stored root node. Also captures type metadata the first time it appears.
		/// </summary>
		private static void GetBasicDetails(WebClient wc, JavaScriptSerializer jss, CUser u)
        {
            // NOTE(review): result unused — presumably touching the property warms
            // the discovery cache; confirm before removing.
            var temp = CDiscovery.Cache;

            //User Type
            var t = CSoftUser.Type;
            // NOTE(review): 's' is unused — Schema may have initialisation side
            // effects; confirm before removing.
            var s = t.Schema;

            //Get Data
            Console.WriteLine("Getting user details...");
            var soft = t.DownloadItem(u, wc, jss);
            Console.WriteLine("Retrieved");

            var freshNode = new CNodeRoot(soft, t, u);
            // First sighting of metadata for this type: persist it on the type.
            if (!t.HasMeta && freshNode.HasMeta)
            {
                t.Metadata = freshNode.Metadata;
                t.Save();
            }
            

            var staleNode = u.Root;

            // Skip the merge entirely when nothing changed (hash compare).
            var oldHash = staleNode.Hash();
            if (oldHash == freshNode.Hash())
                return;
            staleNode.Fields.Merge(freshNode.Fields, u);

            u.Save();
        }
        #endregion 

        #region Security
        /// <summary>
        /// Attempts to extend the user's token with a throwaway client; marks the
        /// user as expired (and rethrows) when the API reports an expired token.
        /// </summary>
        public static void ExtendLifeOfToken(CUser u)
        {
            using (var wc = new WebClient())
            {
                try
                {
                    ExtendLifeOfToken(wc, u, null);
                }
                catch (Exception ex)
                {
                    var parsed = new CDictError(ex);
                    if (parsed.Message.Contains("expired"))
                    {
                        u.UserTokenHasExpired = true;
                        u.Save();
                    }
                    throw;
                }
            }
        }

        /// <summary>
        /// Extends the user's token when it is within renewal range. If the extend
        /// call fails, falls back to fetching a fresh token; errors are recorded on
        /// the check (when one is supplied) rather than thrown.
        /// </summary>
        private static void ExtendLifeOfToken(WebClient wc, CUser u, CCheck c)
        {
            if (u.UserTokenExpires.AddDays(-59) > DateTime.Now) //Can't update it more than once per day
                return;

            try
            {
                // NOTE(review): return value is discarded — confirm ExtendToken
                // persists its own result on the user.
                var dt = u.ExtendToken(wc);
            }
            catch (Exception ex)
            {
                var e = new json.CDictError(ex);
                if (null != c)
                    c.CheckErrors = e.Message;

                // Fallback: request a brand-new token and store it if it differs.
                try
                {
                    var t = u.GetToken(wc);
                    if (t.AccessToken != u.UserToken)
                    {
                        u.UserToken = t.AccessToken;
                        u.UserTokenExpires = DateTime.Now.AddDays(60); //Should last 60days
                        u.Save();
                    }
                }
                catch (Exception ex2)
                {
                    // Both attempts failed: append the second error to the check.
                    e = new json.CDictError(ex2);
                    if (null != c)
                        c.CheckErrors += "\r\n" + e.Message;
                }
                if (null != c)
                    c.Save();
            }
        }

        #endregion





        #region Update Logic
        /// <summary>
        /// Incremental refresh for a user: posts first (so photos can be stored),
        /// then every other manually-configured type with its own API path.
        /// </summary>
        private static void LookForNewData(CCheck c, WebClient wc, JavaScriptSerializer jss, CUser user)
        {
            user.All = null;

            //TODO:
            //1. Do posts first (including images)
            var postType = CType.Cache.GetBySig("post");
            LookForNewData(c, wc, jss, user, postType, false);

            StorePhotos(wc, user);

            //1. do comments/likes/edges in a separate pass
            //2. get images for each post
            //3. get message

            //Loop through types ("me" is the user itself; posts/events handled elsewhere)
            foreach (var type in CType.Cache.Manual)
            {
                if (string.IsNullOrEmpty(type.TypeApiPath) || !type.TypeHasId)
                    continue;
                if (type.TypeApiPath == "me" || type.TypeName == "post" || type.TypeName == "event")
                    continue;
                LookForNewData(c, wc, jss, user, type);
            }
        }
        /// <summary>
        /// Downloads the current data for one type and reconciles it against the
        /// user's stored nodes (inserts/deletes/updates, optionally edges).
        /// Check counters are persisted after each stage.
        /// </summary>
        private static void LookForNewData(CCheck c, WebClient wc, JavaScriptSerializer jss, CUser user, CType type, bool lookForEdges = false)
        {
            var current = GetCurrentData(c, wc, jss, user, type);
            c.Save(); // persist api-call counters even when nothing came back
            if (null == current)
                return;

            var existing = user.All.GetByTypeId(type.TypeId);

            ResolveDifferences(c, wc, jss, existing, current, user.Log, type, user, lookForEdges);
            c.Save();
        }

        /// <summary>
        /// For each connection declared in the type's metadata, downloads edge data
        /// for nodes whose counts suggest a change (comments/likes), then saves each
        /// touched node once. The photo and attachment passes are currently disabled.
        /// </summary>
        private static void LookForNewEdges(CNodeRootList current, CNodeList existing, CCheck c, WebClient wc, JavaScriptSerializer jss, CUser user, CType type)
        {
            if (!type.HasMeta)
                return;

            // Nodes whose edges were refreshed; saved once at the end.
            var changed = new CNodeList();
            foreach (var i in type.Metadata.Connections)
            {
                var url = i.Url; // NOTE(review): unused — DownloadEdge rebuilds the url from the connection; confirm before removing.
                switch (i.Name)
                {

                    case "comments":
                        foreach (var j in current)
                        {
                            if (PossibleChange(j, "comments", "comment_count", user))
                            {
                                DownloadEdge(c, wc, jss, "comment", i, user, CSchema.Facebook.FIELDS_COMMENTS, j);
                                var node = existing.GetById(j);
                                if (null != node && !changed.Contains(node))
                                    changed.Add(node);
                            }
                        }
                        break;

                    case "likes":
                        foreach (var j in current)
                        {
                            if (PossibleChange(j, "likes", "like_count", user))
                            {
                                DownloadEdge(c, wc, jss, "like", i, user, CSchema.Facebook.FIELDS_LIKES, j);
                                var node = existing.GetById(j);
                                if (null != node && !changed.Contains(node))
                                    changed.Add(node);
                            }
                        }
                        break;

                    case "photos":
                        // Disabled: the photo edge download remains commented out.
                        foreach (var j in current)
                        {
                            /*
                            if (PossibleChange(j, "photos", "photo_count", user))
                            {
                                DownloadEdge(c, wc, jss, "photo", i, user, CSchema.Facebook.FIELDS_PHOTOS, j);
                                var node = existing.GetById(j);
                                if (null != node && !changed.Contains(node))
                                    changed.Add(node);
                            }
                            */
                        }
                        break;

                    case "attachments":
                        foreach (var j in current)
                        {
                            //Checks
                            if (j.Name.Contains("cover photo"))
                                continue;
                            if (j.Fields.Has("attachments"))
                                continue;

                            //Now done from main url
                            // Deliberately short-circuited: everything below this point
                            // is unreachable (attachments now arrive with the main download).
                            if (true)
                                continue;

                            //skip if already have some
                            var exist = existing.GetById(j);
                            if (null != exist)
                            {
                                var soft = new CSoftPost(exist.Root, user);
                                if (soft.CountAttachments > 0)
                                    if (new Random().Next(1, 6) != 5)
                                        continue;
                            }

                            DownloadEdge(c, wc, jss, "attachment", i, user, CSchema.Facebook.FIELDS_ATTACHMENTS, j);
                            var node = existing.GetById(j);
                            if (null != node && !changed.Contains(node))
                                changed.Add(node);
                        }
                        break;

                    default:
                        Console.WriteLine("Skipped: " + type.TypeName + ": " + i.Name);
                        break;
                        //case "comment":
                }
            }

            foreach (var i in changed)
                i.Save();
        }
        /// <summary>
        /// Decides whether a collection edge (comments/likes/…) may have changed and
        /// therefore needs downloading.
        /// </summary>
        /// <param name="fresh">Freshly downloaded node.</param>
        /// <param name="collectionTag">Field holding the collection (e.g. "comments").</param>
        /// <param name="countTag">Field holding the collection's count (e.g. "comment_count").</param>
        /// <returns>false when the data is known to be unchanged; true otherwise.</returns>
        private static bool PossibleChange(CNodeRoot fresh, string collectionTag, string countTag, CUser user)
        {
            //Downloaded data already includes the collection
            if (fresh.Fields.Has(collectionTag))
                return false;

            //Known to be none
            var count = fresh.GetString(countTag);
            if (count == "0")
                return false;

            //Known to be no new ones: the stored count matches the fresh count
            var old = user.All.GetById(fresh);
            if (null != old)
            {
                if (old.Root.Fields.Has(collectionTag))
                {
                    // Fix: previously read the hard-coded "comments" field here, so
                    // this stored-count shortcut never worked for likes/other edges.
                    var coll = old.Root.Fields[collectionTag].AsPointerList(user.All);
                    if (coll.Count.ToString() == count)
                        return false;
                }
            }
            return true;
        }
        /// <summary>
        /// Downloads one edge collection (comments/likes/…) for a node and merges
        /// the results into the node's fields. Failures are logged, not thrown.
        /// </summary>
        private static void DownloadEdge(CCheck c, WebClient wc, JavaScriptSerializer jss, string typeName, CConnection edge, CUser u, string fields, CNodeRoot node, bool getMeta = true)
        {
            CType type = CType.Cache.GetBySig(typeName);

            //Build the url from the edge template
            var url = edge.Url.Replace("[Id]", node.Id).Replace("[UserToken]", u.UserToken);
            if (getMeta)
                url = string.Concat(url, "&metadata=1");
            if (!string.IsNullOrEmpty(fields))
                url = string.Concat(url, fields);

            //Download
            try
            {
                u.Log.Log("Checking for ", edge.Name, "... ", node.Name);
                var edges = GetCurrentData(c, wc, jss, u, new CType(), url);
                u.Log.Log("Found ", edges.Length);

                AddOrUpdateField(edges, edge, type, u, node);
            }
            catch (Exception ex)
            {
                u.Log.Log(ex);
            }
        }
        /// <summary>
        /// Attaches downloaded edge children to node <paramref name="j"/> as a field
        /// named after the connection: a pointer field for reference types, otherwise
        /// a composite field. Creates and saves the child CType when not yet cached.
        /// </summary>
        private static void AddOrUpdateField(object[] children, CConnection i, CType type, CUser user, CNodeRoot j)
        {
            //Console.WriteLine(CUtilities.CountSummary(attachments, "attachment", "none"));
            if (null == children || children.Length == 0)
                return;

            // First child acts as the sample for type discovery.
            var a = new CDict(children[0]);

            //Get type
            var t = new CType();
            var s = new CSignature(i.Name);
            var ss = CSignature.Cache.GetByHash(s);
            if (ss.Count == 1)
                t = ss[0].Type;
            else
            {
                // Unknown signature: create a type under "parent/edge" and register it.
                string p = type.TypeName + "/" + i.Name;
                t = CType.Cache.GetOrCreate(p, a, user);
                t.TypeName = i.Name;
                t.Save();
                CTypeList.AddSignatures(t, s);
            }

            //Get tag
            var tag = t.GetOrCreateTag(i.Name, children, user);
            //Make field, add or blend
            if (tag.FieldType.IsReference)
            {
                if (j.Fields.Has(tag.TagName))
                {
                    // Existing pointer field: replace its values with the fresh set.
                    var f = (CFieldPointer)j.Fields[tag.TagName];
                    var f2 = new CFieldPointer(j, tag, children, user);
                    f.Values = f2.Values;
                }
                else
                    j.Fields.Add(new CFieldPointer(j, tag, children, user));
            }
            else
            {
                if (j.Fields.Has(tag.TagName))
                {
                    var f = (CFieldComposite)j.Fields[tag.TagName];

                    // Wrap each raw dictionary as a node in place, then swap the values in.
                    for (var k = 0; k < children.Length; k++)
                        children[k] = new CNodeBasic((Dictionary<string, object>)children[k], f, t, user);
                    f.Values = children.ToArray();
                }
                else
                    j.Fields.Add(new CFieldComposite(j, tag, children, user));
            }
        }
        /// <summary>
        /// Reconciles freshly downloaded nodes against stored ones: applies inserts,
        /// counts (but does not apply) deletes, applies updates, then optionally
        /// downloads edges. Counts are recorded on the check.
        /// </summary>
        private static void ResolveDifferences(CCheck c, WebClient wc, JavaScriptSerializer jss, CNodeList existing, CNodeRootList current, CLog l, CType type, CUser u, bool lookForEdges)
        {
            //INSERTS
            var adds = current.ResolveAdds(existing);
            c.CheckInserts = adds.Count;
            CNodeList added = null;
            try
            {
                added = ApplyAdds(adds);
            }
            catch (Exception ex)
            {
                l.Log(ex);
            }

            //DELETES
            // Never delete the user's own node, whatever the download said.
            var dels = current.ResolveDeletes(existing);
            if (dels.Count > 0)
                if (type.TypeId == u.UserTypeId || type.TypeName.ToLower() == "user")
                    if (dels.Contains(u.UserNodeGuid))
                        dels.Remove(u.UserNodeGuid);

            c.CheckDeletes = dels.Count;

            try
            {
                // Deletes are currently disabled: counted above but not applied.
                //ApplyDeletes(dels, type.TypeId);
            }
            catch (Exception ex)
            {
                l.Log(ex);
            }

            //UPDATES: TODO - ignore missing/null fields, add new fields, merge different fields
            var diff = current.MergeChanges(existing, u);
            //var diff = changes.Diff(current); //** todo: review these two methods
            c.CheckUpdates = diff.Count;
            if (diff.Count > 0)
                try
                {
                    ApplyUpdates(diff);
                }
                catch (Exception ex)
                {
                    l.Log(ex);
                }


            //Edges
            // Include freshly inserted nodes so their edges are considered too.
            if (null != added)
                existing.AddRange(added);
            if (type.HasMeta && lookForEdges)
                LookForNewEdges(current, existing, c, wc, jss, u, type);
        }
        /// <summary>
        /// Wraps each downloaded root as a CNode and saves those that received a
        /// valid guid; returns the full wrapped list (saved or not).
        /// </summary>
        private static CNodeList ApplyAdds(CNodeRootList adds)
        {
            var list = new CNodeList();
            foreach (var i in adds)
                list.Add(new CNode(i, adds.UserId));

            // Fix: removed a dead "else ;" empty statement. Nodes without a guid
            // are intentionally skipped — there is nothing to persist for them.
            foreach (var i in list)
                if (i.NodeGuid != Guid.Empty)
                    i.Save();

            //list.SaveAll(); //TODO: Bulk-insert
            return list;
        }
        /// <summary>Deletes each node identified by guid.</summary>
        private static void ApplyDeletes(List<Guid> dels)
        {
            foreach (var guid in dels)
                CNode.DeleteById(guid);
        }
        /// <summary>Saves each changed node and records a CChange row for its diff.</summary>
        private static void ApplyUpdates(CChangeSet diff)
        {
            foreach (CNode node in diff.Keys)
            {
                node.Save();
                var change = new CChange(node, diff[node]);
                change.Save();
            }
        }
        #endregion



        #region Generic Paging
        /// <summary>
        /// Downloads all current data for a type; returns null when the type has no
        /// API path or when the download fails (the failure is logged, not thrown).
        /// </summary>
        private static CNodeRootList GetCurrentData(CCheck c, WebClient wc, JavaScriptSerializer jss, CUser u, CType t)
        {
            //Types without an API path cannot be downloaded
            if (string.IsNullOrEmpty(t.TypeApiPath))
                return null;

            try
            {
                return GetCurrentData(c, wc, jss, u, t, t.Schema);
            }
            catch (Exception ex)
            {
                Console.WriteLine(string.Concat("Failed! (", t.TypeName, "s)"));
                u.Log.Log(ex);
                return null;
            }
        }
        /// <summary>
        /// Downloads the first page for a type, follows paging, and packs the items
        /// into a CNodeRootList.
        /// NOTE(review): the ISchema parameter 's' is unused, and the single-item
        /// branch duplicates the general path plus an extra Expand pass (which may
        /// trigger further downloads) — confirm intent before simplifying.
        /// </summary>
        private static CNodeRootList GetCurrentData(CCheck c, WebClient wc, JavaScriptSerializer jss, CUser u, CType t, ISchema s)
        {
            //Download, deserialise 
            c.CheckApiCalls += 1;

            //First Page
            var page = u.GetByType(t, wc, jss);

			//Build the list, page at a time
			var allPages = RecursePaging(c, t, wc, jss, page, u);

			//Single object, as a page
			if (allPages.Count == 1)
            {
				Expand(allPages.ToArray(), wc, jss, c, t, 7);
				var packed = new CNodeRootList(u);
                packed.AddPage(allPages.ToArray(), t, u);
                return packed;
            }


			var join = new CNodeRootList(u);
            join.AddPage(allPages, t, u);
            return join;
        }
		/// <summary>
		/// Downloads a url and follows cursor paging; returns the raw page items.
		/// </summary>
		private static object[] GetCurrentData(CCheck c, WebClient wc, JavaScriptSerializer jss, CUser u, CType t, string url)
		{
			c.CheckApiCalls += 1;

			//First page, then follow cursors
			var first = DownloadPage(url, wc, jss);
			var items = RecurseCursors(c, t, wc, jss, first, u);
			return items.ToArray();
		}
		/// <summary>
		/// Follows "next" paging links from the first page, accumulating every
		/// page's items, then expands nested paged collections in the results.
		/// </summary>
		private static List<object> RecursePaging(CCheck c, CType t, WebClient wc, JavaScriptSerializer jss, CPage page, CUser u)
		{
			var allPages = new List<object>();
			allPages.AddRange(page.Data);

			//More pages (fix: removed an outer "if" that duplicated the loop condition)
			while (page.HasPaging && page.Paging.HasNext)
			{
				c.CheckApiCalls += 1;

				page = DownloadPage(page.Paging.NextUrl, wc, jss);
				allPages.AddRange(page.Data);
				u.Log.Log("Paging: " , page.Data.Length.ToString(), " ", t.TypeName, "s, ", allPages.Count, " so far");
			}
			u.Log.Log("Finished: " + allPages.Count.ToString(), " ", t.TypeName, "s");

			Expand(allPages, wc, jss, c, t, 7);
			return allPages;
		}
		/// <summary>
		/// Follows "after" cursors from the first page, accumulating items. A page
		/// with fewer than 10 items is treated as the final page.
		/// </summary>
		private static List<object> RecurseCursors(CCheck c, CType t, WebClient wc, JavaScriptSerializer jss, CPage page, CUser u)
		{
			var allPages = new List<object>();
			allPages.AddRange(page.Data);

			//More pages
			while (page.HasCursors && page.Cursors.HasAfter && page.Data.Length > 9) //Small set probably has no more?
			{
				c.CheckApiCalls += 1;

				string url = t.UrlGetNext(u, page.Cursors.After);   //cursor + token + metadata
				// Fix: removed dead per-page work that hashed the token-stripped url
				// into a guid that was never used.

				page = DownloadPage(url, wc, jss);
				allPages.AddRange(page.Data);
			}
			if (page.Data.Length > 9)
				u.Log.Log("Finished Recurse Cursors: ", allPages.Count, " ", t.TypeName, "s");


			Expand(allPages, wc, jss, c, t, 7);
			return allPages;
		}


		//List-to-array expansion: trivial delegation to the array overload.
		private static void Expand(List<object> list, WebClient wc, JavaScriptSerializer jss, CCheck c, CType t, int levels = 2)
		{
			var arr = list.ToArray();
			Expand(arr, wc, jss, c, t, levels);
		}
		private static void Expand(object[] arr, WebClient wc, JavaScriptSerializer jss, CCheck c, CType t, int levels=2)
        {
			if (levels <= 0)
			{
				Console.WriteLine("Max Levels reached: " + t.TypeName);
				return;
			}

            foreach (object i in arr)
                if (i is Dictionary<string, object>)
                    Expand((Dictionary<string, object>)i, wc, jss, c, t, levels);
        }

        /// <summary>
        /// Recursively expands paged collections embedded in a JSON dictionary:
        /// follows "next" links, replaces d["data"] with the accumulated item list,
        /// and recurses into nested dictionaries. Depth-limited by <paramref name="levels"/>.
        /// </summary>
        public static void Expand(Dictionary<string, object> d, WebClient wc, JavaScriptSerializer jss, CCheck c, CType t, int levels)
        {
			//Recursion limit
			if (levels <= 0)
			{
				Console.WriteLine("No More expansion: " + t.TypeName);
				return;
			}
			levels--;

			//Check if paging: reuse the dictionary when it is already a CPage
			CPage p = null;
            if (d is CPage)
                p = (CPage)d;
            else
			    p = new CPage(d);


			//Not a page: recurse into any nested dictionaries instead
			if (!p.HasData)
            {
                foreach (var i in d.Values)
                    if (i is Dictionary<string, object>)
                        Expand((Dictionary<string, object> )i, wc, jss, c, t, levels);
                return;
            }


			//Small page - ignore cursors?
            if (p.Data.Length < 9)
            {
                Expand(p.Data, wc, jss, c, t, levels);
                return;
            }


			//Paging Expansions: follow "next" links and splice the full list back in
			var list = new List<object>();
			if (p.HasPaging)
            {
				list.AddRange(p.Data);
                if (p.Paging.HasNext)
                {
                    while (p.HasPaging && p.Paging.HasNext)
				    {
					    Console.WriteLine(t.TypeName + " has next/prev paging, expanding...");

                        p = DownloadPage(p.Paging.NextUrl, wc, jss);
                        if (!p.HasData)
                            break;
                        list.AddRange(p.Data);
                    }
                    Console.WriteLine(string.Concat("Expand list done (", t.TypeName, "): " + list.Count));
                    // Replace the original page contents with the accumulated items.
                    d["data"] = list.ToArray();
                    d.Remove("paging");
				    p.Paging = null;
                }
            }


			//Cursor Expansions
			// Deliberately disabled via "&& false" — per the inline note, the type
			// here belongs to the parent, so cursor urls would target the wrong type.
			if (p.HasCursors && false)  //type is wrong (is child), paging is wrong
			{
				Console.WriteLine(t.TypeName + " has cursors, expanding...");
				if (p.Cursors.HasAfter)
					while (p.HasCursors && p.Cursors.HasAfter)
					{
						string url = t.UrlGetNext(c.User, p.Cursors.After);
						c.CheckApiCalls += 1;
						p = DownloadPage(url, wc, jss);
						if (!p.HasData)
							break;
						list.AddRange(p.Data);
					}
				else
					while (p.HasCursors && p.Cursors.HasBefore)
					{
						string url = t.UrlGetPrev(c.User, p.Cursors.Before);
						c.CheckApiCalls += 1;
						p = DownloadPage(url, wc, jss);
						if (!p.HasData)
							break;
						list.AddRange(p.Data);
						Console.WriteLine("Expanded list (for ", t.TypeName, ") by " + p.Data.Length + " " + list.Count.ToString());
					}

				Console.WriteLine("Expand list done (", t.TypeName, "): " + list.Count.ToString());
				d["data"] = list.ToArray();
				d.Remove("paging");
			}

			// Recurse into the accumulated items (empty unless a paging pass ran).
			Expand(list, wc, jss, c, t, levels);
        }

        #endregion
    }
}
