# package deb

# import (
#     "bufio"
#     "bytes"
#     "encoding/json"
#     "fmt"
#     "io/ioutil"
#     "log"
#     "os"
#     "path/filepath"
#     "sort"
#     "strings"
#     "sync"
#     "time"

#     "github.com/pborman/uuid"
#     "github.com/ugorji/go/codec"

#     "github.com/aptly-dev/aptly/aptly"
#     "github.com/aptly-dev/aptly/database"
#     "github.com/aptly-dev/aptly/pgp"
#     "github.com/aptly-dev/aptly/utils"
# )
from database import database
from deb.local import *
from deb.reflist import *
from typing import List,Dict
from deb.snapshot import *
import pickle
import aptly
import utils.listup
from deb.index_files import *
from deb.contents import *
import tempfile
from pgp.gnupg import *
from files.public import *

from context.context import *
class repoSourceItem:
    # One published component's source. Exactly one of snapshot/localRepo is
    # populated, depending on PublishedRepo.SourceKind.
    def __init__(self, snapshot=None, localRepo=None, packageRefs=None):
        # Pointer to snapshot if SourceKind == "snapshot"
        self.snapshot = snapshot
        # Pointer to local repo if SourceKind == "local"
        self.localRepo = localRepo
        # Package references if SourceKind == "local"
        self.packageRefs = packageRefs

# // PublishedRepo is a published for http/ftp representation of snapshot as Debian repository
class PublishedRepo :
    # NOTE(review): the fields below are class-level defaults shared by every
    # instance; the mutable ones (Architectures, sourceItems) are only safe
    # because constructors assign fresh objects per instance — confirm no code
    # path mutates the class defaults in place.
#     // Internal unique ID
    UUID :str=''
#     // Storage & Prefix & distribution should be unique across all published repositories
    Storage              :str=''
    Prefix               :str=''
    Distribution         :str=''
    Origin               :str=''
    NotAutomatic         :str=''
    ButAutomaticUpgrades :str=''
    Label                :str=''
    Suite                :str=''
    Codename             :str=''
#     // Architectures is a list of all architectures published
    Architectures :List[str]=[]
#     // SourceKind is "local"/"repo"
    SourceKind :str=''

#     // Map of sources by each component: component name -> source UUID
    # NOTE(review): default is '' (empty string), not {}. len()/iteration happen
    # to work on '', but item assignment (UpdateSnapshot, NewPublishedRepo)
    # requires a real dict — those paths assign a fresh {} first; verify nothing
    # writes through the class default.
    Sources :dict=''

#     // Legacy fields for compatibility with old published repositories (< 0.6)
    Component :str=''
#     // SourceUUID is UUID of either snapshot or local repo
    SourceUUID :str=''
#     // Map of component to source items
    sourceItems :Dict[str,repoSourceItem]={}

#     // Skip contents generation
    SkipContents :bool=None

#     // True if repo is being re-published
    rePublishing :bool=None

#     // Provide index files per hash also
    AcquireByHash :bool=None
    # // String returns human-readable representation of PublishedRepo
    def String(p) -> str:
        # Human-readable summary:
        #   "<storage:prefix>/<distribution>[ (extras)] [archs] publishes {component: source}, ..."
        # BUG FIX: the port kept Go's two-argument `append(slice, x)` style
        # (`sources.append(sources, ...)`, `extras.append(extras, ...)`), which
        # raises TypeError in Python, and called `''.join(extras, ", ")` —
        # str.join takes a single iterable. Both are corrected below.
        sources = []
        for component in p.Components():
            item = p.sourceItems[component]
            if item.snapshot is not None:
                source = item.snapshot.String()
            elif item.localRepo is not None:
                source = item.localRepo.String()
            else:
                raise Exception("no snapshot/localRepo")
            sources.append("{{{}: {}}}".format(component, source))

        # Optional repo-level attributes rendered as "(key: value, ...)".
        extras: List[str] = []
        if p.Origin != "":
            extras.append("origin: {}".format(p.Origin))
        if p.NotAutomatic != "":
            extras.append("notautomatic: {}".format(p.NotAutomatic))
        if p.ButAutomaticUpgrades != "":
            extras.append("butautomaticupgrades: {}".format(p.ButAutomaticUpgrades))
        if p.Label != "":
            extras.append("label: {}".format(p.Label))
        if p.Suite != "":
            extras.append("suite: {}".format(p.Suite))
        if p.Codename != "":
            extras.append("codename: {}".format(p.Codename))

        extra = ", ".join(extras)
        if extra != "":
            extra = " (" + extra + ")"

        return "{}/{}{} [{}] publishes {}".format(
            p.StoragePrefix(), p.Distribution, extra,
            ', '.join(p.Architectures), ', '.join(sources))
    # // StoragePrefix returns combined storage & prefix for the repo
    def StoragePrefix(p) -> str:
        # "storage:prefix" when a storage backend is named; bare prefix otherwise.
        if p.Storage != "":
            return "{}:{}".format(p.Storage, p.Prefix)
        return p.Prefix
    # // Key returns unique key identifying PublishedRepo
    def Key(p):
        # DB key layout: "U" + <storage:prefix> + ">>" + <distribution>.
        return "".join(("U", p.StoragePrefix(), ">>", p.Distribution))
    # // RefKey is a unique id for package reference list
    def RefKey(p, component: str):
        # DB key for the package ref list of one published component.
        return "E{}{}".format(p.UUID, component)

    # // RefList returns list of package refs in local repo
    def RefList(p, component: str) -> PackageRefList:
        # Resolve the component's package references according to source kind:
        # snapshots carry their own ref list, local publishes store a copy.
        item = p.sourceItems[component]
        if p.SourceKind == SourceSnapshot:
            return item.snapshot.RefList()
        if p.SourceKind == SourceLocalRepo:
            return item.packageRefs
        raise Exception("unknown source")

    # // Components returns sorted list of published repo components
    def Components(p) -> List[str]:
        # Sorted component names (keys of p.Sources).
        # BUG FIX: the Go original sorted these (sort.Strings); the port dropped
        # the sort, leaking dict insertion order into consumers such as the
        # Release "Components" field. Restore deterministic ordering.
        return sorted(p.Sources)

# // Components returns sorted list of published repo source names
    def  SourceNames(p) ->List[str]:
        sources :List[str]= []

        for component in p.Components() :
            source :str=''

            item = p.sourceItems[component]
            if item.snapshot is not None  :
                source = item.snapshot.Name
            elif item.localRepo is not None  :
                source = item.localRepo.Name
            else :
                raise ("no snapshot/localRepo")
            

            sources.append("{}:{}".format(source, component))
        

        # sort.Strings()
        sources.sort()
        return sources
    
    
    # // UpdateLocalRepo updates content from local repo in component
    def UpdateLocalRepo(p, component: str):
        # Refresh the stored package refs for `component` from its local repo
        # and flag the repo for re-publishing (Publish then uses ".tmp" indexes).
        if p.SourceKind != SourceLocalRepo:
            raise Exception("not local repo publish")

        entry = p.sourceItems[component]
        entry.packageRefs = entry.localRepo.RefList()
        p.sourceItems[component] = entry

        p.rePublishing = True

    # // UpdateSnapshot switches snapshot for component
    def UpdateSnapshot(p, component: str, snapshot: Snapshot):
        # Swap in a new snapshot for `component`, record its UUID in Sources,
        # and flag the repo for re-publishing.
        if p.SourceKind != SourceSnapshot:
            raise Exception("not snapshot publish")

        entry = p.sourceItems[component]
        entry.snapshot = snapshot
        p.sourceItems[component] = entry

        p.Sources[component] = snapshot.UUID
        p.rePublishing = True
    # // Encode does msgpack encoding of PublishedRepo
    def Encode(p) -> bytes:
        # Serialize the whole repo object for DB storage (pickle stands in for
        # the Go msgpack encoder).
        return pickle.dumps(p)
    # // Decode decodes msgpack representation into PublishedRepo
    def Decode(p: "PublishedRepo", inputData) -> "PublishedRepo":
        # Deserialize a PublishedRepo from its pickled DB representation,
        # applying legacy-format migrations, and return the decoded object.
        # BUG FIX: the original rebound the local name `p` and then returned
        # None, so the decoded object was silently discarded; return it instead
        # (matching the declared return type).
        decoded = pickle.loads(inputData)

        # // old PublishedRepo were publishing only snapshots
        if decoded.SourceKind == "":
            decoded.SourceKind = SourceSnapshot

        # // <0.6 aptly used single SourceUUID + Component instead of Sources
        if decoded.Component != "" and decoded.SourceUUID != "" and len(decoded.Sources) == 0:
            decoded.Sources = {decoded.Component: decoded.SourceUUID}
            decoded.Component = ""
            decoded.SourceUUID = ""

        return decoded

    # // GetOrigin returns default or manual Origin:
    def GetOrigin(p) -> str:
        # Explicit Origin wins; otherwise default to "<prefix> <distribution>".
        return p.Origin if p.Origin != "" else p.Prefix + " " + p.Distribution
    # // GetLabel returns default or manual Label:
    def GetLabel(p) -> str:
        # Explicit Label wins; otherwise default to "<prefix> <distribution>".
        return p.Label if p.Label != "" else p.Prefix + " " + p.Distribution
    # // GetPath returns the unique name of the repo
    def GetPath(p) -> str:
        # "<storage:prefix>/<distribution>", or just the distribution when there
        # is no prefix.
        prefix = p.StoragePrefix()
        if prefix == "":
            return p.Distribution
        return "{}/{}".format(prefix, p.Distribution)

    # // GetSuite returns default or manual Suite:
    def GetSuite(p) -> str:
        # Explicit Suite wins; otherwise fall back to the distribution name.
        return p.Suite if p.Suite != "" else p.Distribution
    # // GetCodename returns default or manual Codename:
    def GetCodename(p) -> str:
        # Explicit Codename wins; otherwise fall back to the distribution name.
        return p.Codename if p.Codename != "" else p.Distribution

    # // Publish publishes snapshot (repository) contents, links package files, generates Packages & Release files, signs them
    def  Publish(p,packagePool , publishedStorageProvider :"AptlyContext",
        collectionFactory  , signer :GpgSigner , progress  , forceOverwrite :bool) :
        # Publishes the repo: creates the pool/dists layout, links package files
        # from the pool, writes Packages/Contents/Release indexes, signs them,
        # then renames temporary files into place via indexes.RenameFiles().
        # Returns None on success, or an error value / error string on failure
        # (mirrors the Go error-return convention used throughout this file).
        publishedStorage = publishedStorageProvider.GetPublishedStorage(p.Storage)
        err = publishedStorage.MkDir(os.path.join(p.Prefix, "pool"))
        if err is not None  :
            return err
        
        basePath = os.path.join(p.Prefix, "dists", p.Distribution)
        err = publishedStorage.MkDir(basePath)
        if err is not None  :
            return err
        
        
        # NOTE(review): the Go original used a dedicated temp DB, closed and
        # dropped on exit (see the commented defer below); this port reuses the
        # main DB handle instead — confirm that is intentional.
        tempDB = collectionFactory.db
        # if err is not None  :
        #     return err
        
        # defer def() {
        #     e = tempDB.Close()
        #     if e is not None  and progress is not None  {
        #         progress.Printf("failed to close temp DB: %s", err)
        #     }
        #     e = tempDB.Drop()
        #     if e is not None  and progress is not None  {
        #         progress.Printf("failed to drop temp DB: %s", err)
        #     }
        # }()

        if progress is not None  :
            print("Loading packages...\n")
        

        # Map of component name -> PackageList loaded from that component's refs.
        lists ={}# map[string]*PackageList{}

        for component in p.sourceItems :
            # // Load all packages
            lists[component], err = NewPackageListFromRefList(p.RefList(component), collectionFactory.PackageCollection(), progress)
            if err is not None  :
                # NOTE(review): "%s".format(err) does not interpolate — the
                # literal "%s" is returned unchanged; looks like it should be
                # "{}".format(err) — TODO confirm and fix.
                return  "unable to load packages: %s".format(err)
            
     
        if not p.rePublishing :
            # First publish: derive the architecture list from the loaded
            # packages when the caller did not supply one.
            if len(p.Architectures) == 0 :
                for  lista in lists :
                    p.Architectures =p.Architectures+lists[lista].Architectures(True)
                

            if len(p.Architectures) == 0 :
                return  "unable to figure out list of architectures, please supply explicit list"
            

            # sort.Strings(p.Architectures)
           
            p.Architectures = utils.listup.StrSliceDeduplicate(p.Architectures)
        

        # Re-publishing writes indexes with a ".tmp" suffix so the live files
        # stay intact until the final rename.
        suffix =''
        if p.rePublishing :
            suffix = ".tmp"
        

        if progress is not None  :
            progress.Printf("Generating metadata files and linking package files...\n")
        

        tempDir=tempfile.TemporaryDirectory('aptly')
        # tempDir, err = ioutil.TempDir(os.TempDir(), "aptly")
        # if err is not None  {
        #     return err
        # }
        # defer os.RemoveAll(tempDir)

        indexes = newIndexFiles(publishedStorage, basePath, tempDir.name, suffix, p.AcquireByHash)

        legacyContentIndexes ={}# map[string]*ContentsIndex{}
        # Per-package callback invoked by listb.ForEachIndexed() below.
        # NOTE(review): `component` and `contentIndexes` are free variables
        # resolved at call time to the enclosing for-loop's current values (as
        # in the Go closure). However `hadUdebs` here is a LOCAL variable, so
        # the outer hadUdebs below is never updated and the .udeb branch never
        # triggers — looks like a porting bug; TODO confirm (Go shared it).
        def NoName(pkg :Package) :
            hadUdebs = False
            if progress is not None  :
                # progress.AddBar(1)
                print(1)
            

            # Link the package file out of the pool into the published tree;
            # installer packages go under dists/<dist>/.../images instead.
            for  arch in p.Architectures :
                if pkg.MatchesArchitecture(arch) :
                    hadUdebs = hadUdebs or pkg.IsUdeb

                    relPath:str='' 
                    if not pkg.IsInstaller :
                        poolDir, err2 = pkg.PoolDirectory()
                        if err2 is not None  :
                            return err2
                        
                        relPath = os.path.join("pool", component, poolDir)
                    else :
                        if p.Distribution == aptly.DistributionFocal :
                            relPath = os.path.join("dists", p.Distribution, component, "{}-{}".format( pkg.Name, arch), "current", "legacy-images")
                        else :
                            relPath = os.path.join("dists", p.Distribution, component, "{}-{}".format( pkg.Name, arch), "current", "images")
                        
                    

                    err = pkg.LinkFromPool(publishedStorage, packagePool, p.Prefix, relPath, forceOverwrite)
                    if err is not None  :
                        return err
                    
                    break
                
            
            # // Start a db batch. If we fill contents data we'll need
            # // to push each path of the package into the database.
            # // We'll want this batched so as to avoid an excessive
            # // amount of write() calls.
            tempBatch =tempDB# tempDB.CreateBatch()
            # defer tempBatch.Write()

            # Write the package stanza into each matching per-arch index, and
            # feed its file list into the (per-component and legacy) contents
            # indexes unless SkipContents is set.
            for  arch in p.Architectures :
                if pkg.MatchesArchitecture(arch) :
                    # var bufWriter *bufio.Writer

                    if not p.SkipContents and not pkg.IsInstaller :
                        key = "{}-{}".format( arch, pkg.IsUdeb)
                        qualifiedName =  pkg.QualifiedName()
                        contents = pkg.Contents(packagePool, progress)
                        # []map[string]*ContentsIndex{contentIndexes, legacyContentIndexes} 
                        # NOTE(review): per the Go comment above this should be
                        # the plain list [contentIndexes, legacyContentIndexes];
                        # appending dicts to a ContentsIndex object looks wrong —
                        # TODO confirm ContentsIndex supports append/iteration.
                        contentIndexList =ContentsIndex()
                        contentIndexList.append(contentIndexes)
                        contentIndexList.append(legacyContentIndexes)
                        for  contentIndexesMap in contentIndexList:
                            contentIndex = contentIndexesMap.get(key)

                            if contentIndex is None  :
                                contentIndex = NewContentsIndex(tempDB)
                                contentIndexesMap[key] = contentIndex
                            

                            contentIndex.Push(qualifiedName, contents, tempBatch)
                        
                    

                    bufWriter, err = indexes.PackageIndex(component, arch, pkg.IsUdeb, pkg.IsInstaller, p.Distribution).BufWriter()
                    if err is not None  :
                        return err
                    

                    err = pkg.Stanza().WriteTo(bufWriter, pkg.IsSource, False, pkg.IsInstaller)
                    if err is not None  :
                        return err
                    
                    bufWriter.write('\n')
                    
            # Drop per-package caches once the stanza has been written out,
            # to keep memory bounded over large package lists.
            pkg.files = None
            pkg.deps = None
            pkg.extra = None
            pkg.contents = None

            return None
        
        for component, listb in lists.items() :
            hadUdebs = False

            # // For all architectures, pregenerate packages/sources files
            for arch in p.Architectures :
                indexes.PackageIndex(component, arch, False, False, p.Distribution)
            

            if progress is not None  :
                # progress.InitBar(int64(listb.Len()), False)
                print(listb.Len(), False)
            

            listb.PrepareIndex()

            contentIndexes ={}# map[string]*ContentsIndex{}

            err = listb.ForEachIndexed(NoName)

            if err is not None  :
                return  "unable to process packages: {}".format(err)
            

            # Emit Contents-<arch>[.udeb] indexes for this component.
            for  arch in p.Architectures :
                for  udeb in [True, False] :
                    index = contentIndexes.get("{}-{}".format(arch, udeb))
                    # NOTE(review, translated): there may be a problem when
                    # index is an empty array.
                    if index is None  or index.Empty() :
                        continue
                    

                    # var bufWriter *bufio.Writer
                    bufWriter, err = indexes.ContentsIndex(component, arch, udeb).BufWriter()
                    if err is not None  :
                        return  "unable to generate contents index: {}".format(err)
                    

                    _, err = index.WriteTo(bufWriter)
                    if err is not None  :
                        return  "unable to generate contents index: {}".format(err)
                    
                
            

            # if progress is not None  {
            #     progress.ShutdownBar()
            # }

            udebs = [False]#bool{False}
            if hadUdebs :
                udebs .append(  True)

                # // For all architectures, pregenerate .udeb indexes
                for   arch in p.Architectures :
                    indexes.PackageIndex(component, arch, True, False, p.Distribution)
                
            

            # // For all architectures, generate Release files
            for   arch in p.Architectures :
                for  udeb in udebs :
                    release = Stanza()#make(Stanza)
                    release["Archive"] = p.Distribution
                    release["Architecture"] = arch
                    release["Component"] = component
                    release["Origin"] = p.GetOrigin()
                    release["Label"] = p.GetLabel()
                    release["Suite"] = p.GetSuite()
                    release["Codename"] = p.GetCodename()
                    if p.AcquireByHash :
                        release["Acquire-By-Hash"] = "yes"
                    

                    # var bufWriter *bufio.Writer
                    bufWriter, err = indexes.ReleaseIndex(component, arch, udeb).BufWriter()
                    if err is not None  :
                        return "unable to get ReleaseIndex writer: {}".format(err)
                    

                    err = release.WriteTo(bufWriter, False, True, False)
                    if err is not None  :
                        return "unable to create Release file: {}".format(err)
                    
                
            
        

        # Legacy dists/<dist>/Contents-* indexes shared across components.
        for   arch in p.Architectures :
            for   udeb in [True, False ]:
                index = legacyContentIndexes.get("{}-{}".format (arch, udeb))
                if index is None  or index.Empty() :
                    continue
                

                # var bufWriter *bufio.Writer
                bufWriter, err = indexes.LegacyContentsIndex(arch, udeb).BufWriter()
                if err is not None  :
                    return  "unable to generate contents index: {}".format(err)
                

                _, err = index.WriteTo(bufWriter)
                if err is not None  :
                    return  "unable to generate contents index: {}".format( err)
                
            
        

        if progress is not None  :
            print("Finalizing metadata files...\n")
        

        err = indexes.FinalizeAll(progress, signer)
        if err is not None  :
            return err
        

        # Build the top-level (dists/<dist>/Release) stanza.
        release =Stanza()# make(Stanza)
        release["Origin"] = p.GetOrigin()
        if p.NotAutomatic != "" :
            release["NotAutomatic"] = p.NotAutomatic
        
        if p.ButAutomaticUpgrades != "" :
            release["ButAutomaticUpgrades"] = p.ButAutomaticUpgrades
        import datetime
        release["Label"] = p.GetLabel()
        release["Suite"] = p.GetSuite()
        release["Codename"] = p.GetCodename()
        release["Date"] = datetime.datetime.now()#.UTC().Format("Mon, 2 Jan 2006 15:04:05 MST")
        
        release["Architectures"] = " ".join(utils.listup.StrSlicesSubstract(p.Architectures, ArchitectureSource))
        if p.AcquireByHash :
            release["Acquire-By-Hash"] = "yes"
        
        release["Description"] = " Generated by aptly\n"
        release["MD5Sum"] = ""
        release["SHA1"] = ""
        release["SHA256"] = ""
        release["SHA512"] = ""

        release["Components"] = " ".join(p.Components())

        sortedPaths = []#make([]string, 0, len(indexes.generatedFiles))
        for path in indexes.generatedFiles :
            sortedPaths.append(  path)
        
        # sort.Strings(sortedPaths)
        # NOTE(review): despite the name, sortedPaths is NOT sorted — the sort
        # call above was left commented out during the port; TODO confirm intent.

        # Accumulate one checksum line per generated file into the Release stanza.
        for   path in sortedPaths :
            info = indexes.generatedFiles[path]
            release["MD5Sum"] += " {} {} {}\n".format(info.MD5, info.Size, path)
            release["SHA1"] += " {} {} {}\n".format(info.SHA1, info.Size, path)
            release["SHA256"] += " {} {} {}\n".format(info.SHA256, info.Size, path)
            release["SHA512"] += " {} {} {}\n".format(info.SHA512, info.Size, path)
        

        releaseFile = indexes.ReleaseFile()
        bufWriter, err = releaseFile.BufWriter()
        if err is not None  :
            return err
        

        err = release.WriteTo(bufWriter, False, True, False)
        if err is not None  :
            return "unable to create Release file: {}".format( err)
        

        # // Signing files might output to console, so flush progress writer first
        # if progress is not None  {
        #     progress.Flush()
        # }

        err = releaseFile.Finalize(signer)
        if err is not None  :
            return err
        

        # Move the ".tmp"-suffixed files into their final names.
        return indexes.RenameFiles()
    
        
    


# // ParsePrefix splits [storage:]prefix into components
def ParsePrefix(param :str):
    # Split "storage:prefix" on the LAST ':' (so storage names may themselves
    # contain ':', e.g. "filesystem:sym"). An empty prefix after ':' becomes
    # "."; leading/trailing '/' are stripped. Returns (storage, prefix).
    # BUG FIX: the original left `storage` unbound when param has no ':',
    # raising NameError on return — default it to "".
    storage = ""
    i = param.rfind(":")
    if i != -1:
        storage = param[:i]
        prefix = param[i+1:]
        if prefix == "":
            prefix = "."
    else:
        prefix = param

    prefix = prefix.strip('/')
    return storage, prefix
from deb.deb import *
from deb.snapshot import *
from deb.remote import *
# // walkUpTree goes from source in the tree of source snapshots/mirrors/local repos
# // gathering information about declared components and distributions
def walkUpTree(source, collectionFactory):
    # Breadth-first walk from `source` up to the root repositories (local or
    # remote), collecting the distributions and components declared on them.
    # Lookup failures (ByUUID returning an error) are silently skipped, as in
    # the original. Returns (rootDistributions, rootComponents).
    queue = [source]
    rootComponents = []
    rootDistributions = []

    # // walk up the tree from current source up to roots (local or remote repos)
    # // and collect information about distribution and components
    while len(queue) > 0:
        head, queue = queue[0], queue[1:]

        if isinstance(head, Snapshot):
            # BUG FIX: SourceIDs is a flat list of UUID strings; the original
            # kept Go's `for _, uuid := range ...` shape and tried to unpack
            # each string into two values.
            for sourceUUID in head.SourceIDs:
                if head.SourceKind == SourceRemoteRepo:
                    remoteRepo, err = collectionFactory.RemoteRepoCollection().ByUUID(sourceUUID)
                    if err is not None:
                        continue
                    queue.append(remoteRepo)
                elif head.SourceKind == SourceLocalRepo:
                    localRepo, err = collectionFactory.LocalRepoCollection().ByUUID(sourceUUID)
                    if err is not None:
                        continue
                    queue.append(localRepo)
                elif head.SourceKind == SourceSnapshot:
                    snap, err = collectionFactory.SnapshotCollection().ByUUID(sourceUUID)
                    if err is not None:
                        continue
                    queue.append(snap)
        elif isinstance(head, LocalRepo):
            if head.DefaultDistribution != "":
                # BUG FIX: original used Go-style two-argument append
                # (`rootDistributions.append(rootDistributions, ...)`), a TypeError.
                rootDistributions.append(head.DefaultDistribution)
            if head.DefaultComponent != "":
                rootComponents.append(head.DefaultComponent)
        elif isinstance(head, RemoteRepo):
            if head.Distribution != "":
                rootDistributions.append(head.Distribution)
            rootComponents = rootComponents + head.Components
        else:
            raise Exception("unknown type")

    return rootDistributions, rootComponents


# // NewPublishedRepo creates new published repository
# //
# // storage is PublishedStorage name
# // prefix specifies publishing prefix
# // distribution and architectures are user-defined properties
# // components & sources are lists of component to source mapping (*Snapshot or *LocalRepo)
def NewPublishedRepo(storage, prefix, distribution :str, architectures :List[str],
    components :List[str], sources :list, collectionFactory )->PublishedRepo:
    # Returns (PublishedRepo, None) on success or (None, error-string) on
    # failure; invalid argument combinations raise Exception (as before).
    result = PublishedRepo()
    result.UUID = str(uuid.uuid4())
    result.Storage = storage
    result.Architectures = architectures
    result.Sources = {}       # make(map[string]string)
    result.sourceItems = {}   # make(map[string]repoSourceItem)

    if len(sources) == 0:
        raise Exception("publish with empty sources")

    if len(sources) != len(components):
        raise Exception("sources and components should be equal in size")

    discoveredDistributions = []
    # Per-field values seen across snapshots; a field is copied onto the
    # published repo only when every snapshot agrees (exactly one value).
    # BUG FIX: the original started from an empty dict and did
    # fields["Origin"].append(...), raising KeyError on the first snapshot
    # (Go's nil-map-read semantics don't carry over) — pre-seed the lists.
    fields = {"Origin": [], "NotAutomatic": [], "ButAutomaticUpgrades": []}

    # // figure out source kind from the first source
    source = sources[0]
    if isinstance(source, Snapshot):
        result.SourceKind = SourceSnapshot
    elif isinstance(source, LocalRepo):
        result.SourceKind = SourceLocalRepo
    else:
        raise Exception("unknown source kind")

    for i in range(len(sources)):
        component, source = components[i], sources[i]
        if distribution == "" or component == "":
            rootDistributions, rootComponents = walkUpTree(source, collectionFactory)
            if distribution == "":
                discoveredDistributions = discoveredDistributions + rootDistributions
            if component == "":
                # BUG FIX: restore the sort that the Go original performed
                # (sort.Strings) — first==last then really means "all equal".
                rootComponents = sorted(rootComponents)
                if len(rootComponents) > 0 and rootComponents[0] == rootComponents[len(rootComponents)-1]:
                    component = rootComponents[0]
                elif len(sources) == 1:
                    # // only if going from one source, assume default component "main"
                    component = "main"
                else:
                    return None, "unable to figure out component name for {}".format(source)

        # BUG FIX: membership test instead of truthiness of .get(), so a falsy
        # stored value cannot mask a duplicate component.
        if component in result.Sources:
            return None, "duplicate component name: {}".format(component)

        if result.SourceKind == SourceSnapshot:
            snapshot = source
            result.Sources[component] = snapshot.UUID
            item = repoSourceItem()
            item.snapshot = snapshot
            result.sourceItems[component] = item

            # Record distinct per-snapshot header values (plain membership
            # tests replace utils.StrSliceHasItem).
            if snapshot.Origin not in fields["Origin"]:
                fields["Origin"].append(snapshot.Origin)
            if snapshot.NotAutomatic not in fields["NotAutomatic"]:
                fields["NotAutomatic"].append(snapshot.NotAutomatic)
            if snapshot.ButAutomaticUpgrades not in fields["ButAutomaticUpgrades"]:
                fields["ButAutomaticUpgrades"].append(snapshot.ButAutomaticUpgrades)
        elif result.SourceKind == SourceLocalRepo:
            localRepo = source
            result.Sources[component] = localRepo.UUID
            item = repoSourceItem()
            item.localRepo = localRepo
            item.packageRefs = localRepo.RefList()
            result.sourceItems[component] = item

    # // clean & verify prefix
    prefix = prefix.strip("/")
    for part in prefix.split("/"):
        if part == ".." or part == "dists" or part == "pool":
            return None, "invalid prefix {}".format(prefix)

    result.Prefix = prefix

    # // guessing distribution: all discovered candidates must agree
    if distribution == "":
        # BUG FIX: sort before the first==last check (sort.Strings in Go).
        discoveredDistributions = sorted(discoveredDistributions)
        if len(discoveredDistributions) > 0 and discoveredDistributions[0] == discoveredDistributions[len(discoveredDistributions)-1]:
            distribution = discoveredDistributions[0]
        else:
            return None, "unable to guess distribution name, please specify explicitly"

    if "/" in distribution:
        return None, "invalid distribution {}, '/' is not allowed".format(distribution)

    result.Distribution = distribution

    # // only fields which are unique by all given snapshots are set on published
    if len(fields["Origin"]) == 1:
        result.Origin = fields["Origin"][0]
    if len(fields["NotAutomatic"]) == 1:
        result.NotAutomatic = fields["NotAutomatic"][0]
    if len(fields["ButAutomaticUpgrades"]) == 1:
        result.ButAutomaticUpgrades = fields["ButAutomaticUpgrades"][0]

    return result, None


# // MarshalJSON requires object to be "loeaded completely"
# def (p *PublishedRepo) MarshalJSON() ([]byte, error) {
#     type sourceInfo :
#         Component, Name string
#     }

#     sources = []sourceInfo{}
#     for component, item in p.sourceItems {
#         name = ""
#         if item.snapshot is not None  {
#             name = item.snapshot.Name
#         } elif item.localRepo is not None  {
#             name = item.localRepo.Name
#         } else {
#             raise ("no snapshot/local repo")
#         }
#         sources = append(sources, sourceInfo{
#             Component: component,
#             Name:      name,
#         })
#     }

#     return json.Marshal(map[string]interface{}{
#         "Architectures":        p.Architectures,
#         "Distribution":         p.Distribution,
#         "Label":                p.Label,
#         "Origin":               p.Origin,
#         "Suite":                p.Suite,
#         "NotAutomatic":         p.NotAutomatic,
#         "ButAutomaticUpgrades": p.ButAutomaticUpgrades,
#         "Prefix":               p.Prefix,
#         "SourceKind":           p.SourceKind,
#         "Sources":              sources,
#         "Storage":              p.Storage,
#         "SkipContents":         p.SkipContents,
#         "AcquireByHash":        p.AcquireByHash,
#     })
# }













# // RemoveFiles removes files that were created by Publish
# //
# // It can remove prefix fully, and part of pool (for specific component)
# def (p *PublishedRepo) RemoveFiles(publishedStorageProvider aptly.PublishedStorageProvider, removePrefix bool,
#     removePoolComponents []string, progress aptly.Progress) error {
#     publishedStorage = publishedStorageProvider.GetPublishedStorage(p.Storage)

#     // I. Easy: remove whole prefix (meta+packages)
#     if removePrefix {
#         err = publishedStorage.RemoveDirs(os.path.join(p.Prefix, "dists"), progress)
#         if err is not None  {
#             return err
#         }

#         return publishedStorage.RemoveDirs(os.path.join(p.Prefix, "pool"), progress)
#     }

#     // II. Medium: remove metadata, it can't be shared as prefix/distribution as unique
#     err = publishedStorage.RemoveDirs(os.path.join(p.Prefix, "dists", p.Distribution), progress)
#     if err is not None  {
#         return err
#     }

#     // III. Complex: there are no other publishes with the same prefix + component
#     for _, component in removePoolComponents {
#         err = publishedStorage.RemoveDirs(os.path.join(p.Prefix, "pool", component), progress)
#         if err is not None  {
#             return err
#         }
#     }

#     return None
# }

# // PublishedRepoCollection does listing, updating/adding/deleting of PublishedRepos
class PublishedRepoCollection:
    """Does listing, updating/adding/deleting of PublishedRepos.

    Repositories are persisted in ``db`` under keys prefixed with "U" and
    cached in ``lista`` after the first call to ``loadList``.
    (Transliterated from Go; the original embedded *sync.RWMutex.)
    """
    # Backing key/value storage shared with the other collections.
    db :database.Storage
    # Lazy cache of decoded PublishedRepo objects; None means "not loaded yet".
    # FIX: was a shared mutable class-level default ([]) which also defeated
    # the loadList() sentinel, so the cache was never read from the DB.
    lista :List[PublishedRepo]=None

    # // Add appends new repo to collection and saves it
    def Add(collection, repo :PublishedRepo):
        """Append a new repo to the collection and persist it.

        Returns an error string when a repo with the same
        storage/prefix/distribution already exists, the error from Update()
        on failure, or None on success.
        """
        collection.loadList()

        if collection.CheckDuplicate(repo) is not None:
            return "published repo with storage/prefix/distribution {}/{}/{} already exists".format(
                repo.Storage, repo.Prefix, repo.Distribution)

        err = collection.Update(repo)
        if err is not None:
            return err

        collection.lista.append(repo)
        return None

    # // CheckDuplicate verifies that there's no published repo with the same name
    def CheckDuplicate(collection, repo :PublishedRepo) ->PublishedRepo:
        """Return the existing repo sharing storage/prefix/distribution, else None."""
        collection.loadList()

        for r in collection.lista:
            if r.Prefix == repo.Prefix and r.Distribution == repo.Distribution and r.Storage == repo.Storage:
                return r

        return None

    # // Update stores updated information about repo in DB
    def Update(collection, repo :PublishedRepo):
        """Store updated information about repo in the DB.

        For local-repo sources the package reference list of every component
        is persisted as well.
        """
        # NOTE(review): the Go original opened a real DB transaction here and
        # discarded it on error; this transliteration writes directly.
        transaction = collection.db

        transaction.Put(repo.Key(), repo.Encode())

        if repo.SourceKind == SourceLocalRepo:
            for component, item in repo.sourceItems.items():
                transaction.Put(repo.RefKey(component), item.packageRefs.Encode())
        return

    # // LoadComplete loads additional information for the published repo
    def LoadComplete(collection, repo :PublishedRepo, collectionFactory):
        """Resolve repo.Sources into loaded snapshot / local-repo objects.

        Populates repo.sourceItems keyed by component name. Raises for an
        unknown SourceKind; silently returns on lookup errors (preserving the
        original transliteration's behavior).
        """
        repo.sourceItems = {}

        if repo.SourceKind == SourceSnapshot:
            # FIX: iterate key/value pairs — bare dict iteration yields keys
            # only, so (component, sourceUUID) unpacking was broken.
            for component, sourceUUID in repo.Sources.items():
                item = repoSourceItem()

                item.snapshot, err = collectionFactory.SnapshotCollection().ByUUID(sourceUUID)
                if err is not None:
                    return

                err = collectionFactory.SnapshotCollection().LoadComplete(item.snapshot)
                if err is not None:
                    return

                repo.sourceItems[component] = item
        elif repo.SourceKind == SourceLocalRepo:
            for component, sourceUUID in repo.Sources.items():
                item = repoSourceItem()

                item.localRepo, err = collectionFactory.LocalRepoCollection().ByUUID(sourceUUID)
                if err is not None:
                    return

                err = collectionFactory.LocalRepoCollection().LoadComplete(item.localRepo)
                if err is not None:
                    return

                # < 0.6 repos saved refs without a component name; the Go
                # original fell back to RefKey("") — not ported here.
                encoded = collection.db.Get(repo.RefKey(component))

                item.packageRefs = PackageRefList()
                err = item.packageRefs.Decode(encoded)

                repo.sourceItems[component] = item
        else:
            raise Exception("unknown SourceKind")

        return

    # // ForEach runs method for each repository
    def ForEach(collection, handler):
        """Run handler(repo) for every stored published repo (DB prefix "U")."""
        def process(key, blob):
            r = PublishedRepo()
            err = r.Decode(blob)
            if err is not None:
                # FIX: was print("...%s\n", err) — %s was never interpolated.
                print("Error decoding published repo: %s" % err)
                return None

            return handler(r)
        return collection.db.ProcessByPrefix("U", process)

    def loadList(collection):
        """Lazily fetch and decode all published repos into the cache."""
        if collection.lista is not None:
            return

        blobs = collection.db.FetchByPrefix("U")
        # FIX: the original assigned collection.list here while every other
        # method reads collection.lista, so the cache was never populated.
        collection.lista = []

        for blob in blobs:
            r = PublishedRepo()
            err = r.Decode(blob)
            if err is not None:
                print("Error decoding published repo: %s" % err)
            else:
                collection.lista.append(r)
# // NewPublishedRepoCollection loads PublishedRepos from DB and makes up collection
def NewPublishedRepoCollection(db :database.Storage) ->PublishedRepoCollection :
    """Build a PublishedRepoCollection bound to the given DB storage.

    The repo cache itself is filled lazily by the collection's loadList().
    """
    collection = PublishedRepoCollection()
    collection.db = db
    return collection









# // ByStoragePrefixDistribution looks up repository by storage, prefix & distribution
# def (collection *PublishedRepoCollection) ByStoragePrefixDistribution(storage, prefix, distribution string) (*PublishedRepo, error) {
#     collection.loadList()

#     for _, r in collection.list {
#         if r.Prefix == prefix and r.Distribution == distribution and r.Storage == storage {
#             return r, None
#         }
#     }
#     if storage != "" {
#         storage += ":"
#     }
#     return None, fmt.Errorf("published repo with storage:prefix/distribution %s%s/%s not found", storage, prefix, distribution)
# }

# // ByUUID looks up repository by uuid
# def (collection *PublishedRepoCollection) ByUUID(uuid string) (*PublishedRepo, error) {
#     collection.loadList()

#     for _, r in collection.list {
#         if r.UUID == uuid {
#             return r, None
#         }
#     }
#     return None, fmt.Errorf("published repo with uuid %s not found", uuid)
# }

# // BySnapshot looks up repository by snapshot source
# def (collection *PublishedRepoCollection) BySnapshot(snapshot *Snapshot) []*PublishedRepo {
#     collection.loadList()

#     var result []*PublishedRepo
#     for _, r in collection.list {
#         if r.SourceKind == SourceSnapshot {
#             if r.SourceUUID == snapshot.UUID {
#                 result = append(result, r)
#             }

#             for _, sourceUUID in r.Sources {
#                 if sourceUUID == snapshot.UUID {
#                     result = append(result, r)
#                     break
#                 }
#             }
#         }
#     }
#     return result
# }

# // ByLocalRepo looks up repository by local repo source
# def (collection *PublishedRepoCollection) ByLocalRepo(repo *LocalRepo) []*PublishedRepo {
#     collection.loadList()

#     var result []*PublishedRepo
#     for _, r in collection.list {
#         if r.SourceKind == SourceLocalRepo {
#             if r.SourceUUID == repo.UUID {
#                 result = append(result, r)
#             }

#             for _, sourceUUID in r.Sources {
#                 if sourceUUID == repo.UUID {
#                     result = append(result, r)
#                     break
#                 }
#             }
#         }
#     }
#     return result
# }



# // Len returns number of remote repos
# def (collection *PublishedRepoCollection) Len() int {
#     collection.loadList()

#     return len(collection.list)
# }

# // CleanupPrefixComponentFiles removes all unreferenced files in published storage under prefix/component pair
# def (collection *PublishedRepoCollection) CleanupPrefixComponentFiles(prefix string, components []string,
#     publishedStorage aptly.PublishedStorage, collectionFactory *CollectionFactory, progress aptly.Progress) error {

#     collection.loadList()

#     var err error
#     referencedFiles = map[string][]string{}

#     if progress is not None  {
#         progress.Printf("Cleaning up prefix %#v components %s...\n", prefix, ''.join(components, ", "))
#     }

#     for _, r in collection.list {
#         if r.Prefix == prefix {
#             matches = False

#             repoComponents = r.Components()

#             for _, component in components {
#                 if utils.StrSliceHasItem(repoComponents, component) {
#                     matches = True
#                     break
#                 }
#             }

#             if !matches {
#                 continue
#             }

#             err = collection.LoadComplete(r, collectionFactory)
#             if err is not None  {
#                 return err
#             }

#             for _, component in components {
#                 if utils.StrSliceHasItem(repoComponents, component) {
#                     packageList, err = NewPackageListFromRefList(r.RefList(component), collectionFactory.PackageCollection(), progress)
#                     if err is not None  {
#                         return err
#                     }

#                     packageList.ForEach(def(p *Package) error {
#                         poolDir, err = p.PoolDirectory()
#                         if err is not None  {
#                             return err
#                         }

#                         for _, f in p.Files() {
#                             referencedFiles[component] = append(referencedFiles[component], os.path.join(poolDir, f.Filename))
#                         }

#                         return None
#                     })
#                 }
#             }
#         }
#     }

#     for _, component in components {
#         sort.Strings(referencedFiles[component])

#         rootPath = os.path.join(prefix, "pool", component)
#         existingFiles, err = publishedStorage.Filelist(rootPath)
#         if err is not None  {
#             return err
#         }

#         sort.Strings(existingFiles)

#         filesToDelete = utils.StrSlicesSubstract(existingFiles, referencedFiles[component])

#         for _, file in filesToDelete {
#             err = publishedStorage.Remove(os.path.join(rootPath, file))
#             if err is not None  {
#                 return err
#             }
#         }
#     }

#     return None
# }

# // Remove removes published repository, cleaning up directories, files
# def (collection *PublishedRepoCollection) Remove(publishedStorageProvider aptly.PublishedStorageProvider,
#     storage, prefix, distribution string, collectionFactory *CollectionFactory, progress aptly.Progress,
#     force, skipCleanup bool) error {

#     transaction, err = collection.db.OpenTransaction()
#     if err is not None  {
#         return err
#     }
#     defer transaction.Discard()

#     // TODO: load via transaction
#     collection.loadList()

#     repo, err = collection.ByStoragePrefixDistribution(storage, prefix, distribution)
#     if err is not None  {
#         return err
#     }

#     removePrefix = True
#     removePoolComponents = repo.Components()
#     cleanComponents = []string{}
#     repoPosition = -1

#     for i, r in collection.list {
#         if r == repo {
#             repoPosition = i
#             continue
#         }
#         if r.Storage == repo.Storage and r.Prefix == repo.Prefix {
#             removePrefix = False

#             rComponents = r.Components()
#             for _, component in rComponents {
#                 if utils.StrSliceHasItem(removePoolComponents, component) {
#                     removePoolComponents = utils.StrSlicesSubstract(removePoolComponents, []string{component})
#                     cleanComponents = append(cleanComponents, component)
#                 }
#             }
#         }
#     }

#     err = repo.RemoveFiles(publishedStorageProvider, removePrefix, removePoolComponents, progress)
#     if err is not None  {
#         if !force {
#             return fmt.Errorf("published files removal failed, use -force-drop to override: %s", err)
#         }
#         // ignore error with -force-drop
#     }

#     collection.list[len(collection.list)-1], collection.list[repoPosition], collection.list =
#         None, collection.list[len(collection.list)-1], collection.list[:len(collection.list)-1]

#     if !skipCleanup and len(cleanComponents) > 0 {
#         err = collection.CleanupPrefixComponentFiles(repo.Prefix, cleanComponents,
#             publishedStorageProvider.GetPublishedStorage(storage), collectionFactory, progress)
#         if err is not None  {
#             if !force {
#                 return fmt.Errorf("cleanup failed, use -force-drop to override: %s", err)
#             }
#         }
#     }

#     err = transaction.Delete(repo.Key())
#     if err is not None  {
#         return err
#     }

#     for _, component in repo.Components() {
#         err = transaction.Delete(repo.RefKey(component))
#         if err is not None  {
#             return err
#         }
#     }

#     return transaction.Commit()
# }
