I find myself debating the features of my blog here vs. https://spaces.msn.com/members/keithrichie
I think I'll use this blog space for developer-related stuff, and my Spaces one for everything else.
Besides, I don't like that the Spaces site doesn't let me control how wide my blog entries can be.
Regardless, I wanted to post an update to the sample "Document Library" extraction code I wrote a while back.
// Sample code to either calculate the size of all files or export files from a web
// BEGIN Sample.cs
using System;
using System.IO;
using Microsoft.SharePoint;

namespace ExportWebFiles
{
    class AppClass
    {
        static string m_strExportDir = null;
        static bool m_RecurseWebs = false;
        static bool m_RecurseFolders = false;

        [STAThread]
        static void Main(string[] args)
        {
            // Assume that the first argument is a valid URL
            SPSite siteColl = new SPSite(args[0]);

            // For sample purposes, let's recurse everything.
            // You could make these args instead.
            m_RecurseWebs = true;     // recurse ALL subwebs found under the web
            m_RecurseFolders = true;  // recurse ALL folders found on the web

            // Get the path to dump the files to if we want to export the files; otherwise
            // this is a good code template to start from for file-level analysis on your webs.
            if (args.Length > 2 && args[1].ToLower().CompareTo("-export") == 0)
                m_strExportDir = args[2];

            // Run through all the files in every folder on the web
            // to calculate the total size of the files in the web.
            long lTotalSize = GetWebDetails(siteColl.RootWeb);

            Console.WriteLine("Done!!!");
            Console.WriteLine("Total size of all files in all folders is: " + lTotalSize);

            siteColl.Close();
        }

        static long GetWebDetails(SPWeb web)
        {
            long lWebTotal = 0;
            Console.WriteLine("Analyzing web " + web.Name + " at location " + web.ServerRelativeUrl);

            // Enumerate over all the folders on the web
            foreach (SPFolder folder in web.Folders)
            {
                lWebTotal += GetFolderDetails(folder);
            }

            // Do the same for all subwebs
            if (m_RecurseWebs)
            {
                foreach (SPWeb subweb in web.Webs)
                    lWebTotal += GetWebDetails(subweb);
            }

            Console.WriteLine("Total size of all files in all folders for " + web.Name + " is: " + lWebTotal);
            return lWebTotal;
        }

        static long GetFolderDetails(SPFolder folder)
        {
            Console.WriteLine("Processing Folder " + folder.ServerRelativeUrl);
            string strPath = null;
            long lFolderSize = 0;

            // If we're exporting the files from the web to the local file system,
            // create the appropriate file system folder
            if (m_strExportDir != null)
            {
                strPath = CheckPathAndCreate(folder.ServerRelativeUrl);
            }

            foreach (SPFile file in folder.Files)
            {
                // If we're exporting, stream out the files
                if (m_strExportDir != null)
                {
                    string sFileLoc = m_strExportDir + strPath + "\\" + file.Name;
                    Console.WriteLine("    Exporting file " + file.Name + " to " + sFileLoc);
                    byte[] binFile = file.OpenBinary();
                    // FileMode.Create so an existing, longer file gets truncated
                    FileStream fs = new FileStream(sFileLoc, FileMode.Create, FileAccess.Write);
                    fs.Write(binFile, 0, binFile.Length);
                    fs.Close();
                }

                // Tally this file's size toward the folder total
                lFolderSize += file.Length;
            }

            // Add subfolder sizes into this folder's total
            if (m_RecurseFolders)
            {
                foreach (SPFolder subfolder in folder.SubFolders)
                    lFolderSize += GetFolderDetails(subfolder);
            }

            return lFolderSize;
        }

        // Break apart the URL and make a file system directory hierarchy from it if needed
        static string CheckPathAndCreate(string ServerRelativeUrl)
        {
            string strPath = ServerRelativeUrl.Replace("/", "\\");

            // ServerRelativeUrl will begin with "/" so we'll be cool to just append it onto m_strExportDir
            System.IO.Directory.CreateDirectory(m_strExportDir + strPath);
            return strPath;
        }
    }
}
// END Sample.cs
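If you build this into a console exe (I'll call it ExportWebFiles.exe here after the namespace; the name, server, and paths below are just placeholders), usage would look something like this. The first argument is the URL of the site collection; add -export and a local directory to stream the files out instead of just totalling their sizes:

REM Total file sizes only
ExportWebFiles.exe http://myserver/sites/mysite

REM Export every file under the web into C:\Export, mirroring the server-relative folder structure
ExportWebFiles.exe http://myserver/sites/mysite -export C:\Export

Keep in mind this uses the SharePoint object model (Microsoft.SharePoint), so it needs to run on a server in the farm, under an account with rights to the site collection.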