From patchwork Thu Jun 27 19:04:33 2024
X-Patchwork-Submitter: Rémi Denis-Courmont
X-Patchwork-Id: 50198
From: Rémi Denis-Courmont
To: ffmpeg-devel@ffmpeg.org
Date: Thu, 27 Jun 2024 22:04:33 +0300
Message-ID: <20240627190435.12159-1-remi@remlab.net>
X-Mailer: git-send-email 2.45.2
Subject: [FFmpeg-devel] [PATCH 1/3] lavc/vc1dsp: factor R-V V inv_trans_8 code

---
 libavcodec/riscv/vc1dsp_rvv.S | 64 +++++++++++++----------------------
 1 file changed, 23 insertions(+), 41 deletions(-)

diff --git a/libavcodec/riscv/vc1dsp_rvv.S b/libavcodec/riscv/vc1dsp_rvv.S
index 7e1fb84b0c..b3a1f55ab9 100644
--- a/libavcodec/riscv/vc1dsp_rvv.S
+++ b/libavcodec/riscv/vc1dsp_rvv.S
@@ -165,6 +165,7 @@ func ff_vc1_inv_trans_8_rvv, zve32x
         vsll.vi       v23, v7, 4
         vsub.vv       v20, v20, v21
         vsub.vv       v22, v22, v23
+        srli          t2, t1, 2
         vadd.vv       v0, v28, v16
         vadd.vv       v19, v20, v22   # t4
         vadd.vv       v1, v29, v17
@@ -174,6 +175,14 @@
         vsub.vv       v5, v30, v18
         vsub.vv       v6, v29, v17
         vsub.vv       v7, v28, v16
+        beqz          t2, 1f # faster than 4x add t2=zero
+        .irp    n,4,5,6,7
+        vadd.vi       v\n, v\n, 1
+        .endr
+1:
+        .irp    n,0,1,2,3,4,5,6,7
+        vssra.vx      v\n, v\n, t1
+        .endr
         jr            t0
 endfunc

@@ -220,35 +229,22 @@ func ff_vc1_inv_trans_8x8_rvv, zve32x
         addi          a7, a0, 7 * 8 * 2
         vle16.v       v6, (a6)
         vle16.v       v7, (a7)
+        li            t1, 3
         jal           t0, ff_vc1_inv_trans_8_rvv
-        .irp    n,0,1,2,3,4,5,6,7
-        vssra.vi      v\n, v\n, 3
-        .endr
         vsseg8e16.v   v0, (a0)
         .irp    n,0,1,2,3,4,5,6,7
         vle16.v       v\n, (a\n)
         .endr
+        li            t1, 7
         jal           t0, ff_vc1_inv_trans_8_rvv
-        vadd.vi       v4, v4, 1
-        vadd.vi       v5, v5, 1
-        vssra.vi      v4, v4, 7
-        vssra.vi      v5, v5, 7
-        vse16.v       v4, (a4)
-        vadd.vi       v6, v6, 1
-        vse16.v       v5, (a5)
-        vadd.vi       v7, v7, 1
-        vssra.vi      v6, v6, 7
-        vssra.vi      v7, v7, 7
-        vse16.v       v6, (a6)
-        vssra.vi      v0, v0, 7
-        vse16.v       v7, (a7)
-        vssra.vi      v1, v1, 7
         vse16.v       v0, (a0)
-        vssra.vi      v2, v2, 7
         vse16.v       v1, (a1)
-        vssra.vi      v3, v3, 7
         vse16.v       v2, (a2)
         vse16.v       v3, (a3)
+        vse16.v       v4, (a4)
+        vse16.v       v5, (a5)
+        vse16.v       v6, (a6)
+        vse16.v       v7, (a7)
         ret
 endfunc

@@ -256,10 +252,8 @@ func ff_vc1_inv_trans_8x4_rvv, zve32x
         csrwi         vxrm, 0
         vsetivli      zero, 4, e16, mf2, ta, ma
         vlseg8e16.v   v0, (a2)
+        li            t1, 3
         jal           t0, ff_vc1_inv_trans_8_rvv
-        .irp    n,0,1,2,3,4,5,6,7
-        vssra.vi      v\n, v\n, 3
-        .endr
         vsseg8e16.v   v0, (a2)
         addi          a3, a2, 1 * 8 * 2
         vsetivli      zero, 8, e16, m1, ta, ma
@@ -323,33 +317,21 @@ func ff_vc1_inv_trans_4x8_rvv, zve32x
         addi          t1, a2, 7 * 8 * 2
         vle16.v       v6, (t6)
         vle16.v       v7, (t1)
-
+        li            t1, 7
         jal           t0, ff_vc1_inv_trans_8_rvv
-        vadd.vi       v4, v4, 1
         add           t0, a1, a0
-        vadd.vi       v5, v5, 1
-        vadd.vi       v6, v6, 1
-        add           t1, a1, t0
-        vadd.vi       v7, v7, 1
-        vssra.vi      v0, v0, 7
-        add           t2, a1, t1
-        vssra.vi      v1, v1, 7
-        vssra.vi      v2, v2, 7
-        add           t3, a1, t2
-        vssra.vi      v3, v3, 7
-        vssra.vi      v4, v4, 7
-        add           t4, a1, t3
-        vssra.vi      v5, v5, 7
-        vssra.vi      v6, v6, 7
-        add           t5, a1, t4
-        vssra.vi      v7, v7, 7
         vle8.v        v8, (a0)
-        add           t6, a1, t5
+        add           t1, a1, t0
         vle8.v        v9, (t0)
+        add           t2, a1, t1
         vle8.v        v10, (t1)
+        add           t3, a1, t2
         vle8.v        v11, (t2)
+        add           t4, a1, t3
         vle8.v        v12, (t3)
+        add           t5, a1, t4
         vle8.v        v13, (t4)
+        add           t6, a1, t5
         vle8.v        v14, (t5)
         vle8.v        v15, (t6)
         vsetvli       zero, zero, e8, mf4, ta, ma
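
For reference, the rounding that this patch moves into the shared ff_vc1_inv_trans_8_rvv
helper works as follows: the caller loads the shift amount into t1 (3 for the row pass,
7 for the column pass), the helper derives t2 = t1 >> 2 and adds 1 to v4..v7 only when
t2 is non-zero (i.e. only for the 7-bit column pass, hence the beqz skip for the row
pass), and vssra.vx then performs a rounding arithmetic right shift by t1, with the
rounding mode taken from vxrm (set to 0, round-to-nearest-up, by the csrwi visible in
the 8x4 hunk). A rough scalar model of one lane, with a hypothetical helper name used
purely for illustration and not taken from FFmpeg:

#include <stdint.h>

/* Scalar sketch of the per-lane rounding shift now done inside
 * ff_vc1_inv_trans_8_rvv; name and signature are illustrative only.
 * Assumes arithmetic right shift of negative values, as on RISC-V. */
static int16_t vc1_round_shift(int32_t x, unsigned shift, int is_second_half)
{
    if (shift >> 2)               /* t2 != 0: column pass (shift == 7) */
        x += is_second_half;      /* extra +1 applied to rows 4..7 (v4..v7) */
    x += 1 << (shift - 1);        /* vssra rounding bias under vxrm = 0 (rnu) */
    return (int16_t)(x >> shift);
}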