/////////////////////////////////////////////////////////////////////
//
//  Tackle - A Scrum Tracking Web Site Solution
//  Original Copyright © 2007 Microsoft Corp.  All rights reserved.
//  Released under Microsoft Permissive License (Ms-PL)
//  http://www.codeplex.com/Tackle/Project/License.aspx
//
/////////////////////////////////////////////////////////////////////

using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.IO;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;

/// <summary>
/// Code-behind for the default (landing) page. Restores the row-highlighting
/// preference from a cookie on first load and opportunistically cleans up
/// stale viewstate files left behind by the AJAX ASP.NET Beta 2 workaround.
/// </summary>
public partial class _Default : System.Web.UI.Page
{
    // Cookie key under which the row-highlighting preference is persisted.
    const string ckidEnableRowHighlighting = "Default_EnableRowHighlighting";

    protected void Page_Load(object sender, EventArgs e)
    {
        if (!IsPostBack)
        {
            // Restore the row-highlighting preference from the cookie.
            // bool.TryParse instead of Convert.ToBoolean: a tampered or
            // malformed cookie value would otherwise throw FormatException
            // and break the page. Default to enabled, matching the "true"
            // fallback passed to GetCookieInfo.
            bool highlight;
            if (!bool.TryParse(TackleUtility.GetCookieInfo(ckidEnableRowHighlighting, "true"), out highlight))
            {
                highlight = true;
            }
            chbHighlightRows.Checked = highlight;
            chbHighlightRows_CheckedChanged(null, null);

            CleanUpOldFiles();
        }
    }

    /// <summary>
    /// Propagates the checkbox state into session (read by other pages via
    /// "EnableRowHighlighting") and persists it back to the cookie.
    /// Also invoked directly from Page_Load with (null, null) to apply the
    /// cookie value on first render.
    /// </summary>
    protected void chbHighlightRows_CheckedChanged(object sender, EventArgs e)
    {
        // Enable or disable row highlighting for this session.
        Session["EnableRowHighlighting"] = chbHighlightRows.Checked;

        // Persist the choice so it survives across sessions.
        TackleUtility.SetCookieInfo(ckidEnableRowHighlighting, chbHighlightRows.Checked.ToString());
    }

    /// <summary>
    /// Keeps the checkbox in sync with the session value just before render,
    /// in case another page changed the setting during this session.
    /// </summary>
    protected void chbHighlightRows_PreRender(object sender, EventArgs e)
    {
        if (Session["EnableRowHighlighting"] != null)
        {
            chbHighlightRows.Checked = (bool)(Session["EnableRowHighlighting"]);
        }
    }

    // There are viewstate files saved for the master backlog page due to the
    // AJAX ASP.NET Beta 2 workaround. For the time being we clean them up from
    // this default page: it is a one-shot hit per user, so it is a reasonable
    // place to do this for now.
    protected void CleanUpOldFiles()
    {
        // We don't need to do this on every request; restricting it to days
        // where day-of-month % 7 == 0 (the 7th, 14th, 21st and 28th) keeps
        // the cost down while still running a few times a month.
        if (DateTime.Now.Day % 7 == 0)
        {
            try
            {
                DirectoryInfo di = new DirectoryInfo(Server.MapPath("viewstate"));

                // new DirectoryInfo(...) never returns null, so the old
                // "di != null" check was dead code; Exists is the real
                // precondition — without it GetFiles throws when the folder
                // is missing.
                if (di.Exists)
                {
                    // Delete viewstate files older than 7 days.
                    DateTime cutoff = DateTime.Now.AddDays(-7);
                    foreach (FileInfo f in di.GetFiles("*.viewstate"))
                    {
                        if (f.CreationTime <= cutoff)
                        {
                            f.Delete();
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Deliberate best-effort: a failed delete (file in use,
                // permissions, mapping failure) must never break the page.
            }
        }
    }

}
