PowerShell to split a large CSV is taking a very long time; hoping a code review can shorten the process

nkkqxpd9 posted on 2023-02-27 in Shell

The CSV is about a gig and has over a million rows. I'm trying to split it into 5 files. The first 4 took about 18 hours -- the 5th never happened (or I wasn't patient enough); my computer would just get stuck in sleep mode because it couldn't be unlocked.

function Split-Csv{
    [CmdletBinding()]
    Param(
        [Parameter(Mandatory)]
        [string]$FilePath,
        [Parameter()]
        [string]$Delimiter=',',
        [Parameter(Mandatory)]
        [string]$TargetFolder,
        [Parameter(Mandatory)]
        [string]$Name,
        [Parameter(Mandatory)]
        [int]$NumberOfFiles
    )
    try{
        if(-not (Test-Path $TargetFolder)){
            New-Item -Path $TargetFolder -ItemType Directory
        }
        $csvData=Import-Csv -Path $FilePath -Delimiter $Delimiter
        $startRow=0
        $numberOfRowsPerFile=[Math]::Ceiling($csvData.count/$NumberOfFiles)
        $counter=1
        while($startRow -lt $csvData.Count){
            $csvData | Select-Object -Skip $startRow -First $numberOfRowsPerFile | Export-Csv -Path "$TargetFolder\$Name-$counter.csv" -NoTypeInformation -NoClobber
            $startRow+=$numberOfRowsPerFile
            $counter++
        }
    }catch{
        Write-Error $_.Exception.Message
    }
}
Split-Csv -FilePath "C:\temp\2018.csv" -Delimiter ',' -TargetFolder "C:\temp\Split" -Name "Split" -NumberOfFiles 5
d7v8vwbk #1

You could give this function a try. As I said in the comments, it will be much faster and more memory-friendly if you use the .NET APIs instead of treating the CSV as objects.
This function uses the StreamReader and StreamWriter classes to read and write line by line.
Usage examples, as well as its counterpart Merge-Csv, are available through the PS Gallery and the official GitHub repo.

using namespace System.IO
using namespace System.Text
using namespace System.Management.Automation
using namespace System.Management.Automation.Language
using namespace System.Collections
using namespace System.Collections.Generic

# All Credits to jborean93 on the EncodingTransformation Class

# Source: https://gist.github.com/jborean93/50a517a8105338b28256ff0ea27ab2c8#file-get-extendedattribute-ps1

class EncodingTransformation : ArgumentTransformationAttribute {
    [object] Transform([EngineIntrinsics] $EngineIntrinsics, [object] $InputData) {
        $outputData = switch($InputData) {
            { $_ -is [Encoding] } { $_ }

            { $_ -is [string] } {
                switch ($_) {
                    ASCII { [ASCIIEncoding]::new() }
                    BigEndianUnicode { [UnicodeEncoding]::new($true, $true) }
                    BigEndianUTF32 { [UTF32Encoding]::new($true, $true) }
                    ANSI {
                        $raw = Add-Type -Namespace Encoding -Name Native -PassThru -MemberDefinition '
                            [DllImport("Kernel32.dll")]
                            public static extern Int32 GetACP();
                        '
                        [Encoding]::GetEncoding($raw::GetACP())
                    }
                    OEM { [Console]::OutputEncoding }
                    Unicode { [UnicodeEncoding]::new() }
                    UTF8 { [UTF8Encoding]::new($false) }
                    UTF8BOM { [UTF8Encoding]::new($true) }
                    UTF8NoBOM { [UTF8Encoding]::new($false) }
                    UTF32 { [UTF32Encoding]::new() }
                    default { [Encoding]::GetEncoding($_) }
                }
            }

            { $_ -is [int] } { [Encoding]::GetEncoding($_) }

            default {
                throw [ArgumentTransformationMetadataException]::new(
                    "Could not convert input '$_' to a valid Encoding object."
                )
            }
        }

        return $outputData
    }
}

class EncodingCompleter : IArgumentCompleter {
    [string[]] $EncodingSet = @(
        'ascii'
        'bigendianutf32'
        'unicode'
        'utf8'
        'utf8NoBOM'
        'bigendianunicode'
        'oem'
        'utf7'
        'utf8BOM'
        'utf32'
        'ansi'
    )

    [IEnumerable[CompletionResult]] CompleteArgument (
        [string] $commandName,
        [string] $parameterName,
        [string] $wordToComplete,
        [CommandAst] $commandAst,
        [IDictionary] $fakeBoundParameters
    ) {
        [CompletionResult[]] $arguments = foreach($enc in $this.EncodingSet) {
            if($enc.StartsWith($wordToComplete)) {
                [CompletionResult]::new($enc)
            }
        }
        return $arguments
    }
}

function Split-Csv {
    [CmdletBinding(DefaultParameterSetName = 'ByChunks')]
    param(
        [Parameter(Position = 0, Mandatory, ValueFromPipeline, ValueFromPipelineByPropertyName)]
        [alias('FullName')]
        [string] $Path,

        [Parameter(Mandatory)]
        [string] $DestinationFolder,

        [Parameter(ParameterSetName = 'BySize')]
        [int64] $Size = 1kb,

        [Parameter(ParameterSetName = 'ByChunks')]
        [int32] $Chunks = 3,

        [Parameter()]
        [EncodingTransformation()]
        [ArgumentCompleter([EncodingCompleter])]
        [Encoding] $Encoding = 'utf8',

        [Parameter()]
        [switch] $PassThru
    )

    begin {
        $Destination = $PSCmdlet.GetUnresolvedProviderPathFromPSPath($DestinationFolder)

        # Helper: opens the next chunk file, writes the CSV header line
        # first, and hands back a StreamWriter for it
        class ChunkWriter {
            [FileInfo] $Source
            [string] $Destination
            [string] $Headers
            [string] $Format
            [Encoding] $Encoding

            [StreamWriter] GetNewWriter([int32] $Index) {
                $name     = [string]::Format(
                    '{0} - Part {1}{2}',
                    $this.Source.BaseName,
                    $Index.ToString($this.Format),
                    $this.Source.Extension
                )
                $newChunk = Join-Path $this.Destination -ChildPath $name
                $writer   = [StreamWriter]::new($newChunk, $false, $this.Encoding)
                $writer.AutoFlush = $true
                $writer.WriteLine($this.Headers)
                return $writer
            }
        }
    }
    process {
        try {
            [FileInfo] $Path = $PSCmdlet.GetUnresolvedProviderPathFromPSPath($Path)
            $null    = [Directory]::CreateDirectory($Destination)
            $reader  = [StreamReader]::new($Path.FullName, $Encoding, $true)
            $headers = $reader.ReadLine()
            $Index   = 0

            # Work out the byte budget per chunk and the zero-padded
            # numbering format used in the chunk file names
            if($PSCmdlet.ParameterSetName -eq 'ByChunks') {
                $chunkSize = ($Path.Length - $headers.Length) / $Chunks + ($headers.Length * $Chunks)
                $format    = 'D{0}' -f $Chunks.ToString().Length
            }
            else {
                $chunkSize = $Size - $headers.Length
                $format    = 'D{0}' -f [math]::Ceiling($Path.Length / $Size).ToString().Length
            }

            $chunkWriter = [ChunkWriter]@{
                Source      = $Path
                Destination = $Destination
                Headers     = $headers
                Format      = $format
                Encoding    = $Encoding
            }

            $writer = $chunkWriter.GetNewWriter($Index++)

            # Stream the source line by line, rolling over to a new
            # chunk file whenever the current one reaches its budget
            while(-not $reader.EndOfStream) {
                if($writer.BaseStream.Length -ge $chunkSize) {
                    $writer.Dispose()

                    if($PassThru.IsPresent) {
                        $writer.BaseStream.Name -as [FileInfo]
                    }

                    $writer = $chunkWriter.GetNewWriter($Index++)
                }
                $writer.WriteLine($reader.ReadLine())
            }
        }
        catch {
            $PSCmdlet.ThrowTerminatingError($_)
        }
        finally {
            $writer, $reader | ForEach-Object Dispose

            if($PassThru.IsPresent) {
                $writer.BaseStream.Name -as [FileInfo]
            }
        }
    }
}
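
For illustration, a hypothetical invocation of the function above against the file from the question (after loading the function into the session); both parameter sets are shown:

# Split into 5 chunks; -PassThru emits a FileInfo per chunk as it completes
Split-Csv -Path 'C:\temp\2018.csv' -DestinationFolder 'C:\temp\Split' -Chunks 5 -PassThru

# Or split by approximate chunk size instead (the 'BySize' parameter set)
Split-Csv -Path 'C:\temp\2018.csv' -DestinationFolder 'C:\temp\Split' -Size 200mb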
avwztpqn #2

I tested the following code on CSVs of various sizes:

$incsv = Import-Csv .\BigCsv.csv

# rows per chunk; round up so the last chunk absorbs the remainder
# ([int]($incsv.Count / 5) could round down and silently drop rows)
$incr = [int][Math]::Ceiling($incsv.Count / 5)
$last = $incsv.Count - 1

# build the start/end index pair for each chunk
$idx = @()

for ($i = 0; $i -le $last; $i += $incr)
{
    $end = $i + ($incr - 1)
    if ($end -gt $last) { $end = $last }
    $idx += @{ 'Start' = $i ; 'End' = $end }
}

# slice the imported rows by index range and export each chunk
for ($i = 0; $i -lt $idx.Count; $i++)
{
    $start = $idx[$i].Start
    $end = $idx[$i].End
    $incsv[$start..$end] | Export-Csv -NoType ".\Chunk$($i+1).csv"
}
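
As a sanity check (my addition, not part of the tested code), the chunk row counts can be compared against the source:

# Rows across all chunks should equal the source row count
# (Import-Csv does not count the header line)
$chunkTotal = (Get-ChildItem .\Chunk*.csv |
    ForEach-Object { (Import-Csv $_.FullName).Count } |
    Measure-Object -Sum).Sum
"Source rows: $($incsv.Count)  Chunk rows: $chunkTotal"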

I timed the Import-Csv and each chunk export. Import-Csv/Export-Csv don't scale very well, but they should be fine for the size/row count you're working with. My last test CSV was 1.68 GB, ~1.1 million rows, on a 7200 RPM drive in a 5th-gen laptop -- it should run faster on better hardware:

Import-Csv : 00:05:51.2411580
Chunk 1    : 00:02:12.3754368
Chunk 2    : 00:01:16.5562393
Chunk 3    : 00:01:13.3434148
Chunk 4    : 00:01:14.5231756
Chunk 5    : 00:01:14.6382049

Total time : 00:13:02.6859795

The extra time on chunk 1 is probably a disk-related anomaly; in all earlier tests the chunk times were nearly identical, and the total export time was roughly the same as the import time.
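
The answer doesn't show its timing harness; a minimal sketch of how such timings could be captured, reusing the $idx ranges built above, might look like this. A Stopwatch is used for the import so that $incsv survives the measurement; assignments inside a Measure-Command script block run in a child scope and would not persist.

$sw = [System.Diagnostics.Stopwatch]::StartNew()
$incsv = Import-Csv .\BigCsv.csv
"Import-Csv : $($sw.Elapsed)"

for ($i = 0; $i -lt $idx.Count; $i++)
{
    # time each chunk export individually; $t is a TimeSpan
    $t = Measure-Command {
        $incsv[$idx[$i].Start..$idx[$i].End] |
            Export-Csv -NoType ".\Chunk$($i+1).csv"
    }
    "Chunk $($i+1)    : $t"
}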
