te')); return $arr; } // NOTE(review): orphaned tail of a function that begins before this chunk; preserved verbatim.

/**
 * Iterates over all threads belonging to a user.
 *
 * @param mixed  $uid      user ID; an empty value short-circuits to an empty array
 * @param int    $page     page number (1-based)
 * @param int    $pagesize number of records per page
 * @param bool   $desc     sort order by tid: TRUE = descending, FALSE = ascending
 * @param string $key      column whose value keys the returned array
 * @param array  $col      columns to fetch; empty array = all columns
 * @return array rows keyed by $key, or an empty array when $uid is empty
 */
function thread_tid_find_by_uid($uid, $page = 1, $pagesize = 1000, $desc = TRUE, $key = 'tid', $col = array()) {
    if (empty($uid)) return array();
    // -1 = descending, 1 = ascending; any truthy $desc means descending.
    $orderby = $desc ? -1 : 1;
    return thread_tid__find(array('uid' => $uid), array('tid' => $orderby), $page, $pagesize, $key, $col);
}

/**
 * Iterates over thread IDs under a forum; $fid may be an array, e.g. array(1, 2, 3).
 *
 * @param mixed $fid      forum ID or array of forum IDs; empty short-circuits to an empty array
 * @param int   $page     page number (1-based)
 * @param int   $pagesize number of records per page
 * @param bool  $desc     sort order by tid: TRUE = descending, FALSE = ascending
 * @return array rows keyed by tid, each holding the 'tid' and 'verify_date' columns
 */
function thread_tid_find_by_fid($fid, $page = 1, $pagesize = 1000, $desc = TRUE) {
    if (empty($fid)) return array();
    $orderby = $desc ? -1 : 1;
    return thread_tid__find(array('fid' => $fid), array('tid' => $orderby), $page, $pagesize, 'tid', array('tid', 'verify_date'));
}

/**
 * Deletes the thread index row(s) for a thread ID.
 *
 * @param mixed $tid thread ID; empty values are rejected
 * @return mixed FALSE when $tid is empty, otherwise the result of thread_tid__delete()
 */
function thread_tid_delete($tid) {
    if (empty($tid)) return FALSE;
    return thread_tid__delete(array('tid' => $tid));
}

/**
 * Counts all threads.
 *
 * @return int total number of thread index rows
 */
function thread_tid_count() {
    return thread_tid__count();
}

/**
 * Counts a user's threads.
 * NOTE: counts on a non-primary-key column — use with caution on large tables.
 *
 * @param mixed $uid user ID
 * @return int number of threads owned by the user
 */
function thread_uid_count($uid) {
    return thread_tid__count(array('uid' => $uid));
}

/**
 * Counts threads in a forum.
 * NOTE: counts on a non-primary-key column — use with caution on large tables.
 *
 * @param mixed $fid forum ID
 * @return int number of threads in the forum
 */
function thread_fid_count($fid) {
    return thread_tid__count(array('fid' => $fid));
}
?>javascript - What is the difference between a ND-Buffer and a G-Buffer? - Stack Overflow
最新消息:雨落星辰是一个专注网站SEO优化、网站SEO诊断、搜索引擎研究、网络营销推广、网站策划运营及站长类的自媒体原创博客

javascript - What is the difference between a ND-Buffer and a G-Buffer? - Stack Overflow

programmeradmin4浏览0评论

I'm noob at WebGL. I read in several posts of ND-Buffers and G-Buffers as if it were a strategic choice for WebGL development.

How are ND-Buffers and G-Buffers related to rendering pipelines? Are ND-Buffers used only in forward-rendering and G-Buffers only in deferred-rendering?

A JavaScript code example how to implement both would be useful for me to understand the difference.

I'm noob at WebGL. I read in several posts of ND-Buffers and G-Buffers as if it were a strategic choice for WebGL development.

How are ND-Buffers and G-Buffers related to rendering pipelines? Are ND-Buffers used only in forward-rendering and G-Buffers only in deferred-rendering?

A JavaScript code example how to implement both would be useful for me to understand the difference.

Share Improve this question edited Jun 10, 2016 at 13:30 Tony Hinkle 4,7427 gold badges24 silver badges35 bronze badges asked Jun 10, 2016 at 9:48 deblockerdeblocker 7,6972 gold badges25 silver badges64 bronze badges 2
  • Can you share links to the posts? – Kirill Dmitrenko Commented Jun 10, 2016 at 12:06
  • here a juicy one: (upcommons.upc.edu/bitstream/handle/2117/82591/114624.pdf) – deblocker Commented Jun 10, 2016 at 12:41
Add a comment  | 

3 Answers 3

Reset to default 10

G-Buffers are just a set of buffers generally used in deferred rendering.

Wikipedia gives a good example of the kind of data often found in a g-buffer

Diffuse color info

World space or screen space normals

Depth buffer / Z-Buffer

The combination of those 3 buffers is referred to as a "g-buffer"

Generating those 3 buffers from geometry and material data, you can then run a shader to combine them to generate the final image.

What actually goes into a g-buffer is up to the particular engine/renderer. For example one of Unity3D's deferred renders contains diffuse color, occlusion, specular color, roughness, normal, depth, stencil, emission, lighting, lightmap, reflection probs.

An ND buffer just stands for "normal depth buffer" which makes it a subset of what's usually found in a typical g-buffer.

As for a sample that's arguably too big for SO but there's an article about deferred rendering in WebGL on MDN

Choosing a rendering path is a major architectural decision for a 3D renderer, no matter what API it uses. That choice heavily depends upon the set of features the renderer has to support and its performance requirements.

A substantial set of said features consists of so-called screen-space effects. It means that we render some crucial data about each pixel of the screen to a set of renderbuffers and then use that data (not the geometry) to compute some new data needed for a frame. Ambient Occlusion is a great example of such an effect. Based on some spatial values of pixels we compute a "mask" which we can later use to properly shade each pixel.

Moreover, there is a rendering pass which almost exclusively relies on screen-space computations. And it is indeed Deferred Shading. And that's where G-buffers come in. All data needed to compute the colour of a pixel are rendered to a G-buffer: a set of renderbuffers storing that data. The data itself (and hence the meanings of the G-buffer's renderbuffers) can be different: diffuse component, specular component, shininess, normal, position, depth, etc. And as part of rendering a frame, contemporary deferred shading engines use screen-space ambient occlusion (SSAO), which uses data from several of the G-buffer's renderbuffers (usually position, normal and depth).

About ND-buffers. It seems to me that it's not a widely used term (Google failed to find any relevant info on them besides this question). I believe that ND stands for Normal-Depth. They're just a specific case of a G-buffer for a particular algorithm and effect (in the thesis it's SSAO).

So the use of G-buffers (and ND-buffers as a subset of G-buffers) depends upon the shading algorithms and effects you're implementing. But all screen-space computation will require some form of G-buffer.

P.S. The thesis you've linked contains an inaccuracy. The author lists the ability to implement ND-buffers on GLES 2.0 as an advantage of the method. However, it's not actually possible since core GLES 2.0 doesn't have depth textures (they've been added in the OES_depth_texture extension).

I would like to add some more information to the previous answers.

I read in several posts of ND-Buffers and G-Buffers as if it were a strategic choice for WebGL development.

One of the most important parts of deferred rendering is whether the given platform supports MRT (multiple render targets). If it doesn't, you are not able to share partial calculations in shaders between each rendering, and it also forces you to run rendering as many times as you have "layers" (in the case of Unity 3D, it might be up to 11 times?). This could slow down your program a lot.

Read more in this question Is deferred rendering/shading possible with OpenGL ES 2.0 ?

WebGL doesn't support MRT, but it has an extension: https://www.khronos.org/registry/webgl/extensions/WEBGL_draw_buffers/

Also there is an extension for depth textures: https://www.khronos.org/registry/webgl/extensions/WEBGL_depth_texture/

So it should be possible to use deferred rendering technique, but its speed is hard to guess.

发布评论

评论列表(0)

  1. 暂无评论