    btrfs: use memcpy_[to|from]_page() and kmap_local_page() · 3590ec58
    Ira Weiny authored
    There are many places where the pattern kmap/memcpy/kunmap occurs.
    
    This pattern was lifted to the core common functions
    memcpy_[to|from]_page().
    
    Use these new functions to reduce the code, eliminate direct uses of
    kmap, and leverage the new core functions' use of kmap_local_page().
    
    Also, there is one place where a kmap/memcpy is followed by an optional
    memset.  There we leave the kmap open-coded to avoid remapping the page,
    but use kmap_local_page() directly.
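
    For illustration, the two shapes of the change look roughly like the
    following minimal sketch (the function names, the off/len/pad parameters
    and the padding memset are made up for the example, not taken from a
    specific btrfs call site):

    #include <linux/highmem.h>
    #include <linux/string.h>

    /* Before: the open-coded map/copy/unmap pattern this patch removes. */
    static void copy_buf_to_page_old(struct page *page, size_t off,
    				     const char *buf, size_t len)
    {
    	char *kaddr = kmap(page);

    	memcpy(kaddr + off, buf, len);
    	kunmap(page);
    }

    /* After: a single helper that uses kmap_local_page() internally. */
    static void copy_buf_to_page_new(struct page *page, size_t off,
    				     const char *buf, size_t len)
    {
    	memcpy_to_page(page, off, buf, len);
    }

    /*
     * The one remaining open-coded site: keep the local mapping so an
     * optional memset can reuse it instead of remapping the page.
     */
    static void copy_buf_and_pad(struct page *page, size_t off,
    			     const char *buf, size_t len, size_t pad)
    {
    	char *kaddr = kmap_local_page(page);

    	memcpy(kaddr + off, buf, len);
    	if (pad)
    		memset(kaddr + off + len, 0, pad);
    	kunmap_local(kaddr);
    }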
    
    Development of this patch was aided by the coccinelle script:
    
    // <smpl>
    // SPDX-License-Identifier: GPL-2.0-only
    // Find kmap/memcpy/kunmap pattern and replace with memcpy*page calls
    //
    // NOTE: Offsets and other expressions may be more complex than what the
    // script handles automatically.  Therefore a catch-all rule is provided to
    // find the pattern, which must then be evaluated by hand.
    //
    // Confidence: Low
    // Copyright: (C) 2021 Intel Corporation
    // URL: http://coccinelle.lip6.fr/
    // Comments:
    // Options:
    
    //
    // simple memcpy version
    //
    @ memcpy_rule1 @
    expression page, T, F, B, Off;
    identifier ptr;
    type VP;
    @@
    
    (
    -VP ptr = kmap(page);
    |
    -ptr = kmap(page);
    |
    -VP ptr = kmap_atomic(page);
    |
    -ptr = kmap_atomic(page);
    )
    <+...
    (
    -memcpy(ptr + Off, F, B);
    +memcpy_to_page(page, Off, F, B);
    |
    -memcpy(ptr, F, B);
    +memcpy_to_page(page, 0, F, B);
    |
    -memcpy(T, ptr + Off, B);
    +memcpy_from_page(T, page, Off, B);
    |
    -memcpy(T, ptr, B);
    +memcpy_from_page(T, page, 0, B);
    )
    ...+>
    (
    -kunmap(page);
    |
    -kunmap_atomic(ptr);
    )
    
    // Remove any pointers left unused
    @
    depends on memcpy_rule1
    @
    identifier memcpy_rule1.ptr;
    type VP, VP1;
    @@
    
    -VP ptr;
    	... when != ptr;
    ? VP1 ptr;
    
    //
    // Some callers kmap without a temp pointer
    //
    @ memcpy_rule2 @
    expression page, T, Off, F, B;
    @@
    
    <+...
    (
    -memcpy(kmap(page) + Off, F, B);
    +memcpy_to_page(page, Off, F, B);
    |
    -memcpy(kmap(page), F, B);
    +memcpy_to_page(page, 0, F, B);
    |
    -memcpy(T, kmap(page) + Off, B);
    +memcpy_from_page(T, page, Off, B);
    |
    -memcpy(T, kmap(page), B);
    +memcpy_from_page(T, page, 0, B);
    )
    ...+>
    -kunmap(page);
    // No need for the ptr variable removal
    
    //
    // Catch all
    //
    @ memcpy_rule3 @
    expression page;
    expression GenTo, GenFrom, GenSize;
    identifier ptr;
    type VP;
    @@
    
    (
    -VP ptr = kmap(page);
    |
    -ptr = kmap(page);
    |
    -VP ptr = kmap_atomic(page);
    |
    -ptr = kmap_atomic(page);
    )
    <+...
    (
    //
    // Some call sites have complex expressions within the memcpy
    // match a catch all to be evaluated by hand.
    //
    -memcpy(GenTo, GenFrom, GenSize);
    +memcpy_to_pageExtra(page, GenTo, GenFrom, GenSize);
    +memcpy_from_pageExtra(GenTo, page, GenFrom, GenSize);
    )
    ...+>
    (
    -kunmap(page);
    |
    -kunmap_atomic(ptr);
    )
    
    // Remove any pointers left unused
    @
    depends on memcpy_rule3
    @
    identifier memcpy_rule3.ptr;
    type VP, VP1;
    @@
    
    -VP ptr;
    	... when != ptr;
    ? VP1 ptr;
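
    //
    // Note: memcpy_to_pageExtra()/memcpy_from_pageExtra() are not real kernel
    // helpers.  The catch-all rule inserts both names only as markers, so each
    // matched call site still has to be converted to the correct direction
    // (and the bogus name removed) by hand.
    //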
    
    // </smpl>
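
    A semantic patch like the one above is normally run with spatch; the exact
    command is not part of this commit, but an invocation along these lines
    would apply it in place (the .cocci file name here is just an example):

    	spatch --sp-file memcpy_kmap.cocci --dir fs/btrfs --in-place
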
    Reviewed-by: Christoph Hellwig <hch@lst.de>
    Signed-off-by: Ira Weiny <ira.weiny@intel.com>
    Reviewed-by: David Sterba <dsterba@suse.com>
    Signed-off-by: David Sterba <dsterba@suse.com>