<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>UgoTrade &#187; open distributed augmented reality</title>
	<atom:link href="http://www.ugotrade.com/tag/open-distributed-augmented-reality/feed/" rel="self" type="application/rss+xml" />
	<link>http://www.ugotrade.com</link>
	<description>Augmented Realities at the Edge of the Network</description>
	<lastBuildDate>Wed, 25 May 2016 15:59:56 +0000</lastBuildDate>
	<language>en-US</language>
		<sy:updatePeriod>hourly</sy:updatePeriod>
		<sy:updateFrequency>1</sy:updateFrequency>
	<generator>https://wordpress.org/?v=3.9.40</generator>
	<item>
		<title>Urban Augmented Realities and Social Augmentations that Matter: Talking with Bruce Sterling, Part 2</title>
		<link>http://www.ugotrade.com/2010/09/17/urban-augmented-realities-and-social-augmentations-that-matter-interview-with-bruce-sterling-part-2/</link>
		<comments>http://www.ugotrade.com/2010/09/17/urban-augmented-realities-and-social-augmentations-that-matter-interview-with-bruce-sterling-part-2/#comments</comments>
		<pubDate>Fri, 17 Sep 2010 21:43:35 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[3D point clouds]]></category>
		<category><![CDATA[an ARG for World Peace]]></category>
		<category><![CDATA[Anselm Hook]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[are2010]]></category>
		<category><![CDATA[ARWave Android client]]></category>
		<category><![CDATA[ARWave at Software Freedom Day]]></category>
		<category><![CDATA[augmented foraging]]></category>
		<category><![CDATA[augmented reality checkins]]></category>
		<category><![CDATA[augmented reality event]]></category>
		<category><![CDATA[Bertine van Hovell]]></category>
		<category><![CDATA[Biological Globalisation]]></category>
		<category><![CDATA[Boskoi]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Crisis Filter]]></category>
		<category><![CDATA[cryptoforests]]></category>
		<category><![CDATA[Davide Carnovale]]></category>
		<category><![CDATA[deterritorialization]]></category>
		<category><![CDATA[difference between augmented reality and ubiquitous computing]]></category>
		<category><![CDATA[emergency response]]></category>
		<category><![CDATA[Favela Chic]]></category>
		<category><![CDATA[fightthegooglejugend]]></category>
		<category><![CDATA[Four Square]]></category>
		<category><![CDATA[Gamepocalypse]]></category>
		<category><![CDATA[gardens gone wild]]></category>
		<category><![CDATA[Gene Becker]]></category>
		<category><![CDATA[google goggles]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[gowalla]]></category>
		<category><![CDATA[homophilies]]></category>
		<category><![CDATA[hyperlocal experiences]]></category>
		<category><![CDATA[interview with Bruce Sterling]]></category>
		<category><![CDATA[JCPT the open Android 3D engine]]></category>
		<category><![CDATA[Jesse James Garrett]]></category>
		<category><![CDATA[Jesse Schell]]></category>
		<category><![CDATA[Joshua Kauffman]]></category>
		<category><![CDATA[Ken Eklund]]></category>
		<category><![CDATA[Kooaba]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[Lightning Laboratories]]></category>
		<category><![CDATA[location based social networking]]></category>
		<category><![CDATA[Maarten Lens-FitzGerald]]></category>
		<category><![CDATA[machine intelligence]]></category>
		<category><![CDATA[machine learning]]></category>
		<category><![CDATA[Mark Evin]]></category>
		<category><![CDATA[Markus Strickler]]></category>
		<category><![CDATA[NextHope]]></category>
		<category><![CDATA[NextHope AMD]]></category>
		<category><![CDATA[Occipital]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[open distributed platform for AR]]></category>
		<category><![CDATA[physical world platform]]></category>
		<category><![CDATA[proximity-based social networking]]></category>
		<category><![CDATA[psychogeography]]></category>
		<category><![CDATA[real-time information brokerages]]></category>
		<category><![CDATA[realtime information brokerages]]></category>
		<category><![CDATA[Shaping Things]]></category>
		<category><![CDATA[ShapingThings]]></category>
		<category><![CDATA[Sixth Sense for Autism]]></category>
		<category><![CDATA[SMSSlingshot]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[Social Augmented Experiences that Matter]]></category>
		<category><![CDATA[social mapping]]></category>
		<category><![CDATA[Software Freedom Day]]></category>
		<category><![CDATA[Swift]]></category>
		<category><![CDATA[territorialization]]></category>
		<category><![CDATA[The Cryptoforests of Utrecht]]></category>
		<category><![CDATA[Thomas Wrobel]]></category>
		<category><![CDATA[Tonchidot]]></category>
		<category><![CDATA[Ubistudio]]></category>
		<category><![CDATA[urban augmented realities]]></category>
		<category><![CDATA[Urban Edibles Amsterdam]]></category>
		<category><![CDATA[urban fallows]]></category>
		<category><![CDATA[urban forsts]]></category>
		<category><![CDATA[urban informatic mapping]]></category>
		<category><![CDATA[urban informatics]]></category>
		<category><![CDATA[Ushahidi]]></category>
		<category><![CDATA[vision assisted augmented reality]]></category>
		<category><![CDATA[vision based augmented reality]]></category>
		<category><![CDATA[visual search]]></category>
		<category><![CDATA[Wave in a Box]]></category>
		<category><![CDATA[WaveinaBox]]></category>
		<category><![CDATA[Westraven Psychogeography]]></category>
		<category><![CDATA[Will Wright at Augmented Reality Event]]></category>
		<category><![CDATA[YDreams]]></category>
		<category><![CDATA[Zorop]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=5627</guid>
		<description><![CDATA[Social Augmented Experiences leveraging geoawareness and human and machine intelligence to create real time information brokerages, combined with an augmented reality view, can create a new opportunities to reimagine our relationships with each other and our environment. This Summer, I have been on a blogging hiatus, which has meant I haven&#8217;t been sharing as frequently [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><strong><strong><span> </span></strong></strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/augmentedforaging1.jpg"><img class="alignnone size-medium wp-image-5651" title="augmentedforaging" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/augmentedforaging1-200x300.jpg" alt="augmentedforaging" width="200" height="300" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/westraven81.JPG"><img class="alignnone size-medium wp-image-5652" title="westraven8" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/westraven81-225x300.jpg" alt="westraven8" width="225" height="300" /></a></p>
<p>Social Augmented Experiences leveraging geoawareness and human and machine intelligence to create real time information brokerages, combined with an augmented reality view, can create a new opportunities to reimagine our relationships with each other and our environment.</p>
<p>This   Summer, I have been on a blogging hiatus, which has meant I haven&#8217;t   been sharing as  frequently and, unfortunately, the second half of two conversations I had earlier this year, both of which have much influenced my thinking on social augmented reality, have languished in private mode &#8211; part 2 of my talk with Bruce  Sterling (see <a title="Permanent Link to Interview with Bruce Sterling, Part I: At the 9am of the Augmented Reality Industry, are2010" rel="bookmark" href="../../2010/06/16/interview-with-bruce-sterling-part-i-at-the-9am-of-the-augmented-reality-industry-are2010/">Interview with Bruce Sterling, Part I: At the 9am of the Augmented Reality Industry, are2010</a>), and part 2 of my conversation with Anselm   Hook <a title="Permanent Link to Visual Search, Augmented Reality and a Social Commons for the Physical World Platform: Interview with Anselm Hook" rel="bookmark" href="../../2010/01/17/visual-search-augmented-reality-and-a-social-commons-for-the-physical-world-platform-interview-with-anselm-hook/">- Visual Search, Augmented Reality and a Social Commons for the Physical World Platform: Interview with Anselm Hook, Part 1.</a> Time to get caught up on some blogging! The lightly edited transcript of Part 2 of <a href="#tag1">my conversation with Bruce Sterling is posted in full below</a>.</p>
<p>Bruce Sterling has been blogging all the key developments in augmented reality (amongst other topics of interest!) on <a href="http://www.wired.com/beyond_the_beyond/" target="_blank">his Wired Blog</a>, and <a href="http://www.wired.com/beyond_the_beyond/2010/08/augmented-reality-augmented-foraging/" target="_blank">he brought my attention</a> to <a href="http://libarynth.org/augmented_foraging">Boskoi</a> the <a title="http://www.ushahidi.com/" rel="nofollow" href="http://www.ushahidi.com/">Ushahidi</a> based app for Android phones, <a href="http://lib.fo.am/augmented_foraging" target="_blank">augmented foraging </a>pictured in use above &#8211; for more pics see<span> <a href="http://fightthegooglejugend.com/index.html" target="_blank">fightthegooglejugend</a>. </span></p>
<p><span><br />
</span></p>
<h3><strong><strong>Augmented Reality and Real Time Information Brokerages</strong></strong></h3>
<p><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/08/Screen-shot-2010-08-28-at-12.53.54-AM.png"><img class="alignnone size-medium wp-image-5630" title="Screen shot 2010-08-28 at 12.53.54 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/08/Screen-shot-2010-08-28-at-12.53.54-AM-300x176.png" alt="Screen shot 2010-08-28 at 12.53.54 AM" width="300" height="176" /></a><br />
</span></p>
<p><em><span>Picture above is the path the &#8220;nomads&#8221; took through the Westraven cryptoforest with Pieter Bol, co-author of the book <a href="http://www.biologicalglobalisation.com/">Biological Globalisation</a> and Theun Karelse of <a href="http://urbanedibles.blogspot.com/">Urban Edibles Amsterdam</a> &#8220;who presented his &#8216;augmented foraging&#8217; app <a href="http://libarynth.org/augmented_foraging">Boskoi.</a>&#8221; For more see <a href="http://fightthegooglejugend.com/cryptoforests.html" target="_blank">The Cryptoforests of Utrecht </a>and <a href="http://fightthegooglejugend.com/westraven.html" target="_blank">Westra</a><a href="http://fightthegooglejugend.com/westraven.html" target="_blank">ven Psychogeography, 6 June 2010.</a> </span><span> </span><span>(Note</span><span>: Cryptoforests: 1) Urban forests hidden from view 2) Urban fallows that might or might </span><span> </span><span>not be considered as forests 3) Gardens gone wild)</span></em></p>
<p><strong> </strong></p>
<p>My interest in the Ushahidi family of ideas was already fired up by a conversation with <a href="http://www.hook.org/" target="_blank">Anselm Hook</a> early this year.Â  We discussed a number of <a href="http://vimeo.com/ushahidi">Ushahidi</a> related    projects, <a href="http://swift.ushahidi.com/" target="_blank">Swift</a>, Crisis Filter and Anselm&#8217;s project <a href="http://hook.org/" target="_blank">Angel</a>, Augmented    Reality, and my own keen interest in an open, real time, distributed platform for    augmented reality &#8211; <a href="http://www.arwave.org/" target="_blank">ARWave</a>.</p>
<p>The Ushahidi platform and the related project Swift has pioneered the real  time brokerage of information with people acting in curatorial roles or  matchmaking roles coevolving with machine assisted  matching to connect wants to haves.Â  Ushahidi uses multiple gateways including SMS, and Twitter.Â  But the Ushahidi family of ideas is extremely interesting when combined with augmented reality and suggests many new possibilities for social augmented experiences, as Anselm pointed out, for human to human communications, human  to  civilization communication, and human to environment communications (e.g., perhaps, how machine intelligence can help bridge the difference in time scale that Kate Hartman explores in her, <a href="http://vimeo.com/10352604"> Research for Glacier-Human Communication Techniques).</a></p>
<p>Ushahidi, which means &#8220;testimony&#8221; in Swahili, is a website that was    initially  developed to map reports of violence in Kenya after the post-election  fallout at the beginning of 2008.  It is now an open platform with a wide range of applications and growing developer community.Â  See <a href="http://vimeo.com/7838030">What is  the Ushahidi Platform?</a> from <a href="http://vimeo.com/ushahidi">Ushahidi</a> on <a href="http://vimeo.com/">Vimeo</a>.</p>
<p><a href="http://swift.ushahidi.com/" target="_blank">Swift </a>- a project that emerged from the Ushahidi dev community, is a human sensor/real-time brokerage for dealing with emergencies, enabling the filtering and verification of real-time data from channels such as Twitter, SMS, Email and RSS feeds.</p>
<p><a href="http://libarynth.org/augmented_foraging">Boskoi</a> &#8211; <a href="http://lib.fo.am/augmented_foraging" target="_blank">augmented foraging </a><span>is the first app,Â  I have seen, to begin linking Ushahidi with augmented reality  &#8211; although I don&#8217;t think there is a full augmented view for Boskoi developed yet?</span></p>
<h3><strong>&#8220;The whole point of AR is to see things from a different point of view&#8230;&#8221;</strong></h3>
<p><strong><br />
</strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/ARWaveCurrentStatus3post.png"><img class="alignnone size-medium wp-image-5705" title="ARWaveCurrentStatus3post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/ARWaveCurrentStatus3post-300x212.png" alt="ARWaveCurrentStatus3post" width="300" height="212" /></a><br />
</strong></p>
<p><strong> </strong></p>
<p><em>Click to enlarge poster from upcoming ARWave demo at Software Freedom Day &#8211; for more see below</em></p>
<p>I am often asked what augmented reality brings to the table with respect to location based social networking, which is on the verge of going mainstream in smart phone apps like <a href="http://foursquare.com/">Four Square</a>. The first part of my answer is usually to explain what is unique to augmented reality.</p>
<p>As Bo Begole notes, the full vision of AR requires machine   perception  technologies to detect  the identity and physical   configuration of  objects relative to each  other to accurately project   information  alongside/overlaid with a physical object (see this post on the PARC Blog by Bo Begole on the <a href="http://bit.ly/9Rsh79">difference between AR and ubiquitous computing</a> &#8211; thank you <a href="http://gamesalfresco.com/2010/09/12/weekly-linkfest-62/" target="_blank">Rouli for bringing my attention to this</a>).</p>
<p>But it is only in recent months that we have begun to see the kind of tools that make this possible become freely available to developers &#8211; see<a href="http://www.ugotrade.com/2010/08/05/vision-based-augmented-reality-ar-in-smart-phones-qualcomms-ar-sdk-interview-with-jay-wright/" target="_blank"> my interview with Jay Wright of Qualcomm here</a>. Â  Also see this post on <a href="http://phototour.cs.washington.edu/bundler/" target="_blank">Bundler: Structure from Motion for Unordered Image Collections</a> an open source system that allows the creation of 3D point clouds from unordered image collections, e.g. internet image collections.Â  We now have many tools available to move mobile augmented reality beyond the recent crop of apps relying on GPS and compass alone for positioning into a new era of vision assisted AR apps that will increasingly bring the full vision of AR into our daily lives.</p>
<p>Further, the  integration of visual search  applications   like <a href="http://www.google.com/mobile/goggles/#text">Google Goggles</a> and <a href="http://www.kooaba.com/">Kooaba</a> which can detect the identity of particular objects will add another vital tool to machine perception technologies enabling AR &#8220;checkins&#8221; on potentially anything in the physical world around us, and more fuel to the <a href="http://gamepocalypsenow.blogspot.com/">Gamepocalypse</a> (e.g. it would be easy to turn every trash can in the city into a basketball hoop as we discussed at the <a href="http://www.meetup.com/ARNY-Augmented-Reality-New-York/" target="_blank">ARNY</a> meetup last month).Â   And soon, the Pandora&#8217;s Box ofÂ  facial recognition (Google Goggles have the capability though it is not released to the  public  yet) will open up.</p>
<p>Jesse Schell described the importance of AR in a nutshell <a href="http://augmentedrealityevent.com/2010/08/25/are2010-keynote-by-jesse-schell-augmented-reality-will-define-the-21st-century/" target="_blank">in his keynote for are2010</a>:</p>
<p><strong>&#8220;The  whole point of AR is to see things from a different point of  view&#8230;How  can there be a more powerful art form than one that actually  changes  what you see?&#8221;</strong></p>
<p>But how AR matures as a social experience will be the key to Jesse&#8217;s suggestion that:</p>
<p><strong>&#8220;Augmented Reality will be one of the things that fundamentally define the 21st century&#8221;</strong></p>
<p>There are many interesting forms of AR that are not reliant on a tight  registration between media and physical objects &#8211; several are put forward by Bruce in the convo below. And, it is likely we will see AR eyewear as an occasional useful accessory to a smart phone long before we have the sexy, affordable augmented reality eyewear that we wear throughout the day. <a href="http://www.yankodesign.com/2010/08/31/speech-to-text-glasses/" target="_blank">These speech to text glasses</a> would be a very useful and viable accessory to a smart phone right now for the hearing impaired.</p>
<p>For the moment, as Bruce notes, some of the most interesting and useful augmented experiences to date have not been in the cell phone space:</p>
<p><strong> &#8220;There are other aspects of AR besides the cell phone space. There&#8217;s  Total Immersion&#8217;s big display screens. There&#8217;s the web-based fiduciary  stuff. And there&#8217;s projection mapping. And then there&#8217;s experience  design just for people who need their reality augmented for whatever  personal or social reason.&#8221;</strong></p>
<p>One of my favorite social AR experiences is this<a href="http://www.youtube.com/watch?v=oLnKSKaY1Yw&amp;feature=player_embedded" target="_blank"> SMS Slingshot</a>.</p>
<p>But I have been excited for a long while about the intersection of mobile social augmented    reality, real time communications, and ubiquitous computing &#8211; see <a title="Permanent Link to Total Immersion and the &#8220;Transfigured City:&#8221; Shared Augmented Realities, the &#8220;Web Squared Era,&#8221; and Google Wave" rel="bookmark" href="../../2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/">Total Immersion and the &#8220;Transfigured City:&#8221; Shared Augmented Realities, the &#8220;Web Squared Era,&#8221; and Google Wave</a>. And I have  described in    many places why I think real time, open,   distributed communications  for AR are so    important to developing social augmented experiences &#8211; see <a href="http://www.slideshare.net/TishShute/ar-wave-a-proof-of-concept-federation-game-dynamics-semantic-search-mobile-social-communications" target="_blank">the slides for my talk at Augmented Reality Event here</a>, <a href="../../2010/04/02/ar-wave-at-where-2-0-exploring-social-augmented-experiences/" target="_blank">here</a> and <a href="http://www.mobilemonday.nl/talks/tish-shute-the-next-wave-of-ar/" target="_blank">here</a> for starters.</p>
<p><strong><br />
</strong></p>
<h3><strong> ARWave at Software Freedom Day 2010, September 18th 2010<br />
</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/Screen-shot-2010-09-17-at-12.12.02-PM.png"><img class="alignnone size-medium wp-image-5683" title="Screen shot 2010-09-17 at 12.12.02 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/Screen-shot-2010-09-17-at-12.12.02-PM-300x38.png" alt="Screen shot 2010-09-17 at 12.12.02 PM" width="300" height="38" /></a></p>
<p>Thomas Wrobel and Bertine van Hovell will demo the first ARWave Android client <a href="http://www.sfd2010.nl/" target="_blank">at Software Freedom Day this weekend</a>!</p>
<p>A number of people have asked me, (including Bruce), What will be the future of ARWave now that Google Wave is no longer a stand alone application?Â  Yes, the recently announced release of <a href="http://googlewavedev.blogspot.com/2010/09/wave-open-source-next-steps-wave-in-box.html" target="_blank">Wave in a Box</a> (see <a href="http://arstechnica.com/web/news/2010/09/google-sticks-wave-source-in-a-box-sticks-a-bow-on-top.ars" target="_blank">here </a>and<a href="http://www.readwriteweb.com/archives/google_announces_wave_in_a_box.php" target="_blank"> here</a>) is very exciting for the ARWave team.</p>
<p>The ARWave Android client is the  first open AR client built on an open, real time, distributed platform -Â  based on a server that anyone can download and set up, currently the  &#8220;FedOne&#8221; server but Wave in a Box, hopefully,  will be even easier to deploy.Â  Wave in a Box seems perfect for ARWave&#8217;s needs &#8211;  for more <a href="https://groups.google.com/group/wave-protocol/browse_thread/thread/70067fc740b4c8d3" target="_blank">see the WiaB Google Group here</a>.Â   And for more information on the ARWave client -Â  click to enlarge the poster below, see the <a href="http://arwave.org/pages/Videos.php" target="_blank">ARWave concept video here</a>, and for more, and how to get involved see <a href="http://arwave.org/new_index.php" target="_blank">arwave.org</a>.Â Â  Props to <a href="http://www.lostagain.nl/#" target="_blank">Thomas Wrobel and Bertine van Hovell</a> (posters below from demo for Software Freedom Day), Mark Evin, <a href="http://twitter.com/need2revolt" target="_blank">Davide Carnovale</a>, and <a href="http://twitter.com/kusako" target="_blank">Markus Strickler</a>, for all their hard and brilliant work on ARWave.Â  Also to <a href="http://www.jpct.net/" target="_blank">JCPT the open Android 3D engine</a> that has saved a lot of work!</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/ARWaveCurrentStatus1post.png"><img class="alignnone size-medium wp-image-5687" title="ARWaveCurrentStatus1post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/ARWaveCurrentStatus1post-212x300.png" alt="ARWaveCurrentStatus1post" width="212" height="300" /></a></p>
<p><em>click to enlarge slide</em></p>
<h3><strong>Social Augmented Experiences that Matter</strong></h3>
<p>My ideas on the future of social augmented experience have been deeply informed by the conversations I had with Bruce Sterling and Anselm Hook this year.</p>
<p>Bruce  Sterling notes in the conversation below, location based social  apps like Four Square are interesting because they are not <strong> &#8220;urban geography like Google&#8217;s  satellite stare from above,&#8221;</strong> but  rather <strong>&#8220;groups of citizens are doing portraits  of their own region.&#8221; </strong> Augmented Reality, with its oft-lauded power to make the invisible visible is, of course, the ideal tool for taking &#8220;citizen portraits&#8221; to the next level. Cory Doctorow  described to me three years ago (<a href="http://www.ugotrade.com/2007/10/31/cory-doctorow-a-reverse-surveillance-society/" target="_blank">see here</a>) an &#8220;inverse surveillance society,&#8221; enabled by an augmented view &#8211; &#8220;<strong>where all the data from the positional and temporal  characteristics of all the objects that we own  were in aggregate  visible and available so that we can mix and match them  remix them  understand them and have more agency in the world.&#8221;</strong></p>
<p>It is very cool to go back to reread <a href="http://www.ugotrade.com/2007/10/31/cory-doctorow-a-reverse-surveillance-society/" target="_blank">this  conversation </a>now that it is becoming possible to build the kinds of apps Cory described, and Bruce Sterling envisioned in <strong><a href="http://mitpress.mit.edu/catalog/item/default.asp?tid=10603&amp;ttype=2" target="_blank">Shaping Things</a></strong> (see Amazon.orgÂ  page 111).</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/shapingthings.jpg"><img class="alignnone size-thumbnail wp-image-5689" title="shapingthings" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/shapingthings-150x150.jpg" alt="shapingthings" width="150" height="150" /></a></p>
<p><em>click to enlarge</em></p>
<p>MyÂ  conversation with Bruce earlier this summer (see below) took place on the heels of <a href="http://augmentedrealityevent.com/">are2010 &#8211; Augmented Reality Event</a>.Â Â  <a href="http://augmentedrealityevent.com/2010/06/06/are-2010-keynote-by-bruce-sterling-build-a-big-pie/" target="_blank">See the video of Bruce&#8217;s keynote, &#8220;Bake a BigPie,&#8221; here</a>,Â  and the <a href="http://augmentedrealityevent.com/2010/08/25/are2010-keynote-by-jesse-schell-augmented-reality-will-define-the-21st-century/" target="_blank">final keynote, &#8220;Seeing,&#8221; by Jesse Schell (see video here)</a> in which Jesse riffed on AR and the man with the X-ray eyes.Â  Both these awesome talks are still fresh in my mind.Â  Bruce noted how we should pay attention to augmentations for people and situations that could really use some augmentation&#8230; and not get too fixated on the coming of AR Goggles.Â  He elaborated on this in our conversation (again full transcript below):</p>
<p><strong>&#8220;Well,  it&#8217;s a matter of deciding whose reality it is that you&#8217;re  trying to augment.  I&#8217;m not trying to be a bleeding heart about it, but  obviously there are people in our society right now with reality that  could really use some augmentation.  They are mostly disadvantaged  people.  They are vision impaired, or maybe they have autism.  They  might be senile and just can&#8217;t remember where they put their shoes.   These are people who could really use some help, right?&#8221;</strong></p>
<p><strong>&#8220;So, start  with people who really need sensory or cognitive help. Before you  turn  our geeks into Superman, why don&#8217;t you try turning some people who are  harmed into more functional individuals?  Then you&#8217;ll be able to learn  how to do that. Then maybe you can ramp it up to these Nietzschian  heights of the superb Man With the X-ray Eyes.  Whatever.&#8221;</strong></p>
<p>What will make AR interesting and useful long before and long after we see the full vision of AR eyewear manifest is its social aspects. Bruce points out:</p>
<p><strong>&#8220;My  argument would be that if you want people to be  more sensitive toward   certain, say, issues and problems, itâ€™s better to  find the people who   are already sensitive to those issues and  problems, and give them a   bigger stake in your augmentation system.&#8221;</strong></p>
<p><strong>&#8220;Say that I am really worried about public health.   Well, if I have a lot of nurses that are using my system, people who are  aware of my issues, then I could be walking around and I&#8217;ll see a lot  more tags saying, &#8220;This is where he got food poisoning!&#8221;  &#8220;In this  shooting gallery, many people have caught AIDS!&#8221;  Or, you know,  &#8220;Tuberculosis has been spotted over here in this building.&#8221;</strong></p>
<p><strong>&#8220;At  that point, I could simply share their knowledge and get some social  intelligence.  As opposed to trying to  amp the basements of my little  hacker-mind and drag stuff up that&#8217;s escaped my conscious attention.&#8221;</strong></p>
<p>Finding new ways to broker information &#8211; bring together needs with haves and different participants, empowered and disempowered &#8211; is, as Anselm discussed with me, one way to change our view of human to human, human to environment and human to civilization communication (particularly in light of this &#8220;sobering account of how open data is used against the poor in Bangalore&#8221; that as <a href="http://twitter.com/timoreilly/status/23179898934" target="_blank">@timoreilly noted</a> recently <a href="http://gurstein.wordpress.com/2010/09/02/open-data-empowering-the-empowered-or-effective-data-use-for-everyone/" target="_blank">OpenData Empowering the Empowered)</a>.</p>
<p>The key idea in a crisis filter, Anselm noted, was to break  up the participants into different kinds, to connect wants with haves:</p>
<p><strong>&#8220;There are  people who are  inÂ  situation.Â  We call them citizens.Â  And  then there  are reporters,  people who report situations back to Twitter.Â  And then there are curators, people that canvas Twitter    looking for important Tweets.Â  And then there are first responders, people who take the curating collection of responses and then act on them.&#8221;</strong></p>
<p>This kind of brokerage between people acting in a curatorial role or matchmaking role with each other can be extended into and coevolve with machine assisted matching as Anselm explains.</p>
<p>It is also a vital part of creating social augmented experiences that matter.</p>
<p>One of Anselm Hook&#8217;s projects, which is called <a href="http://hook.org/" target="_blank">Angel</a>, is the most radical expression of connecting wants with haves in that the  idea is that &#8220;you have a  situation, you broadcast that  situation, and help  magically appears. You don&#8217;t even sign up for a service. You just get  help&#8230;&#8221;</p>
<p>As Anselm explains, this is the same idea of a brokerage for dealing with emergencies, but applied to the long tail of crisis response. As Anselm describes it:</p>
<p><strong><strong>&#8220;I am interested in personal crisis. &#8216;I lost my cat. Help. I can&#8217;t find </strong>where my kid is. I am out of gas. I have a flat tire. My house is on fire. My aunt is trapped in the bedroom.&#8217; The kind of personal crisis    that is just as important, but is not enough to get a national  movement   to help you&#8230;</strong></p>
<p>I will publish this conversation with Anselm in full in an upcoming post.</p>
<h3>Zorop &#8211; an ARG for World Peace</h3>
<p><strong><strong><span> </span></strong></strong><a href="http://libarynth.org/augmented_foraging"><span style="font-family: 'times new roman';"><span style="font-size: small;"> </span></span></a>If you want to be part of a really exciting experiment to reimagine our relationships with each other and can be in San Jose this weekend, I highly recommend exploring <a href="http://zorop.org" target="_blank">this &#8220;rabbit hole&#8221;</a>.</p>
<p><object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="640" height="385" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="allowFullScreen" value="true" /><param name="allowscriptaccess" value="always" /><param name="src" value="http://www.youtube.com/v/czUpYfme0kg?fs=1&amp;hl=en_US" /><param name="allowfullscreen" value="true" /><embed type="application/x-shockwave-flash" width="640" height="385" src="http://www.youtube.com/v/czUpYfme0kg?fs=1&amp;hl=en_US" allowscriptaccess="always" allowfullscreen="true"></embed></object></p>
<p>Thank you <a href="http://www.lightninglaboratories.com/tcw/about-2/" target="_blank">Gene Becker</a>, <a href="http://www.lightninglaboratories.com/" target="_blank">Lightning Laboratories</a> and <a href="http://ubistudio.org/" target="_blank">Ubistudio</a> for sending me this invite:</p>
<p><strong>&#8220;Ken  Eklund (<a href="http://twitter.com/writerguygames" target="_blank">@writerguygames</a>) is developing a wonderful game for the 01SJ  Biennial called ZOROP, aimed at creating World Peace(!). Some of you  might know Ken from his work on the amazing ARGs EVOKE and World Without  Oil. Anyway Ken, along with his collaborator Annette Mees, are  furiously working to get ZOROP ready to go for the Sept 17th premiere at  01SJ.</strong></p>
<p><strong>Are you intrigued? I thought so, and here are your next steps down the rabbit hole:</strong> <strong> </strong></p>
<p><strong>&gt; Check out </strong> <strong><a href="http://zorop.org/" target="_blank">http://zorop.org</a> to learn about the game</strong></p>
<p><strong>&gt; Follow @ZoropPrime to watch it unfold: </strong> <strong><a href="http://twitter.com/zoropprime" target="_blank">http://twitter.com/zoropprime</a></strong></p>
<p><strong>&gt; &#8216;Like&#8217; ZOROP on FB for a different view: </strong> <strong><a href="http://www.facebook.com/pages/Zorop/141140772593618" target="_blank">http://www.facebook.com/pages/Zorop/141140772593618</a></strong></p>
<p><strong>&gt; Become one with the game; consider volunteering as a Zoropathian: </strong> <strong><a href="mailto:curious@zorop.org">curious@zorop.org</a></strong></p>
<p><strong>&gt; Head down to San Jose on the 17th, play the game, and ride the ZOROP Mexican Party Bus. Seriously.&#8221;</strong></p>
<p style="margin: 0pt;"><strong><br />
</strong></p>
<h3><strong>Interview with Bruce Sterling</strong><strong> </strong><a name="tag1"></a></h3>
<p><a href="http://www.flickr.com/photos/brucesterling/4671866157/in/photostream/" target="_blank"><img class="alignnone size-medium wp-image-5676" title="Screen shot 2010-09-16 at 7.59.56 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/Screen-shot-2010-09-16-at-7.59.56-PM-300x180.png" alt="Screen shot 2010-09-16 at 7.59.56 PM" width="300" height="180" /></a></p>
<p><em>Click on image above to see video clip from</em> <a href="http://www.flickr.com/photos/brucesterling/4673885122/" target="_blank"><em>brucesflickr</em></a></p>
<p>[Note: the <a href="http://www.ugotrade.com/2010/06/16/interview-with-bruce-sterling-part-i-at-the-9am-of-the-augmented-reality-industry-are2010/" target="_blank">first part of this interview is here</a>, and I broke in anticipation of Part 2 just as I started experimenting with an idea <a href="http://www.linkedin.com/in/joshuakauffman" target="_blank">Joshua Kauffman</a> &#8211; an advisor and entrepreneur working on design in the public sphere &#8211; gave me for an interview technique &#8211; the All Souls College one-word question interview. Although apparently <a href="http://www.nytimes.com/2010/05/28/world/europe/28oxford.html" target="_blank">they recently scrapped it</a>, and I am not very good at sticking to a single word!]</p>
<p><strong>Tish  Shute:</strong> We were talking about these proximity-based social work networks like Foursquare and Gowalla and how they may influence the emergence of social augmented experiences.</p>
<p>So Joshua&#8217;s suggestion for the first word was &#8220;territorialization&#8221; e.g. how do these new mobile social experiences like Foursquare,  and the observation that actually rather than breaking down territorialization &#8211; which would be a good thing, tend to support territorialization&#8230;</p>
<p><strong>Bruce Sterling: Yeah, they&#8217;re re-intensifying it in a very odd, electronic fashion.</strong></p>
<p><strong>Tish Shute:</strong> Yes.</p>
<p><strong>Bruce Sterling: It&#8217;s not true of projection mapping or the webcam fiduciary display stuff. But with the handheld stuff, and especially the urban informatic stuff, it really can&#8217;t help but take on a local flavor. <a href="http://www.layar.com/" target="_blank">Layar</a> is like &#8220;Augmented Dutch Reality.&#8221;</strong></p>
<p><strong>And <a href="http://www.tonchidot.com/" target="_blank">TonchiDot</a> is &#8220;Augmented Japanese Reality.&#8221; It&#8217;s hard to imagine a Layar interface going gangbusters in Tokyo. Whereas the TonchiDot interface, which is so clearly influenced by Anime and cartoon graphics&#8230; Maybe it could find some niche of hipsters in Amsterdam hash bars&#8230;</strong></p>
<p><strong>Stuff that&#8217;s socially generated by people on the ground, as with Foursquare and Gowalla, is bound to take on a regional influence. Right? It&#8217;s like the New York hipsters who were early adopters of Foursquare. They&#8217;re not mapping New York! They&#8217;re mapping Hipster New York.</strong></p>
<p><strong>It&#8217;s all about Williamsburg and places where 24-year-olds go to drink&#8230; They found a demographic niche there. These guys are building the service for them. They&#8217;re people who are willing to work for Foursquare for free, because they want to wear the little king hat.</strong></p>
<p><strong>Tish Shute:</strong> I got the far far away badge &#8216;cos I live on the Upper West Side!</p>
<p><strong>Bruce Sterling: But that&#8217;s not urban geography, right? I mean, that&#8217;s not like Google&#8217;s satellite stare from above.  That&#8217;s a group of citizens doing a portrait of their own region.  You&#8217;re going to see interesting things happen because, of course, people who use Foursquare elsewhere are going to check into New York, and they&#8217;re going to look at the &#8220;New York Foursquare.&#8221;   They&#8217;re going to be aliens who interact with Foursquare people in New York and annotate what they&#8217;re seeing.</strong></p>
<p><strong>Tish Shute:</strong> Oh! Yes. Good point.</p>
<p><strong>Bruce Sterling: That Foursquare community has a certain &#233;migr&#233; soul. It&#8217;s different from the normal &#233;migr&#233; soul of simple tourists in New York. So your friend there is right about the territorialization.</strong></p>
<p><strong>Tish Shute:</strong> Yes, Joshua Kauffman is a smart guy!  Yes I am interested to see what interesting kinds of deterritorializations proximity based social networks and the hyperlocal view of augmented reality might bring, not just the new territorializations.</p>
<p><strong>Bruce Sterling: It&#8217;s not the intense kind of territorialization, like gangs putting down graffiti markers and beating people up.  It&#8217;s an inherent regional character that comes with using peer production to build your database.</strong></p>
<p><strong>Tish Shute:</strong> We were discussing whether AR could break down the walls between people &#8211;  people who share the same physical space but actually inhabit different territories even if they are sitting on the table next to you.</p>
<p><strong>Bruce Sterling: You know, I just wrote an article for my Italian magazine column. I think I mentioned this to you &#8211; a report about ARE 2010.   I titled it, &#8220;Chicks Dig Augmented Reality.&#8221;</strong></p>
<p><strong>Tish Shute:</strong> [laughs]</p>
<p><strong>Bruce Sterling:   There is a very heavy social element to AR, and a phone based element. So the question is: Why would a woman wear a fiducial marker? Like our <a href="http://www.metaio.com/" target="_blank">Metaio</a> speaker at ARE2010 who had a fiducial marker on her lapel pin.</strong></p>
<p><strong>Tish Shute:</strong> Right. Lisa!</p>
<p><strong>Bruce Sterling: Why would a woman go out in public with her Facebook profile on her body?</strong></p>
<p><strong>Tish Shute: </strong>Well I can think of some reasons&#8230;</p>
<p><strong>Bruce Sterling: So that men will approach her, of course.</strong></p>
<p><strong>Tish Shute:</strong> Yes the core of all successful social networks is always a form of dating app.</p>
<p><strong>Bruce Sterling: You use it as a social icebreaker. It&#8217;s like: I&#8217;m a woman, I&#8217;m sitting here alone, and you can sort of glide by and, you know, take a snap of me. Then you retreat and have a beer with your friends and you work up the courage, and then you come and say, &#8220;So! Susan! I understand you like bicycling! And, boy, me too!&#8221; Right?</strong></p>
<p><strong>Tish Shute:</strong> There are all kinds of social barriers between people in cities that AR might be helpful in breaking down.  An extreme example is the dilemma you actually quite often face as a New Yorker as you walk around a city.  There are people asleep on the pavement and you don&#8217;t know if they&#8217;re dead or alive.</p>
<p><strong>Bruce Sterling: Right.</strong></p>
<p><strong>Tish Shute:</strong> And you sort of like have this awful ethical dilemma of like, &#8220;Am I walking by someone I should be shaking by the shoulder, right, to wake them up so they don&#8217;t die, right?&#8221;</p>
<p><strong>Bruce Sterling: Yes.</strong></p>
<p><strong>Tish Shute: </strong> You said in your keynote that we should pay attention to augmentations for people and situations that could really use some augmentation..</p>
<p><strong>Bruce Sterling: Right. There actually is such an app in Britain right now.  I posted about it:  two Augmented Reality schemes for rubbish and hobos.</strong></p>
<p><strong>Tish Shute:</strong> Right. Yes I saw that!</p>
<p><strong>Bruce Sterling:  &#8220;Any sufficiently advanced technology is indistinguishable from garbage and hobos.&#8221;  You don&#8217;t need to personally find out whether this hobo is worth your help.  What you need is a good way to report the hobo to a hobo check-up service.   They come in, and they look on their own database or supply a database to you, or a facial recognition unit, whatever.  The service says: &#8220;Oh, well.  That&#8217;s Fred. He&#8217;s a paranoid schizophrenic. He always sleeps in that alley. Let him be.&#8221;</strong></p>
<p><strong>The same goes for the rubbish &#8212; although I don&#8217;t want to compare rubbish to hobos.   In fact, people do go out with their AR kits and take pictures of abandoned garbage bags and broken glass.  They upload them with geolocated tags for the local garbage guys.  Guys who are sitting around doing pretty much nothing because they don&#8217;t know where the rubbish is.</strong></p>
<p><strong>And they will come out and get the rubbish! I mean, they just deputize guys to go out and follow these alerts. Right?</strong></p>
<p><strong>But nobody predicted &#8212; least of all me &#8212; that you were going to have a high-tech Augmented Reality system that consisted of removing rubbish and derelicts. Right?   But rubbish and derelicts  always go profoundly under-reported. It&#8217;s just hard to get people&#8217;s attention.</strong></p>
<p><strong>But it&#8217;s very easy to set up a system so that, if you get  ten reports on the same piece of rubbish, that&#8217;s going to work its way to the top of the stack.   That&#8217;s why I was trying to get AR people away from the romance of  the hottest app for the shiniest machine.  More toward a design stance that&#8217;s more user-centric.</strong></p>
<p><strong>Where are the actual problems about stuff that we perceive?  Stuff we can&#8217;t do anything about?   Or people whose mechanisms of perceptions are harmed. They could be doing good work, being more participative, if they didn&#8217;t, basically, walk around without their glasses on.</strong></p>
<p><strong>Tish Shute:</strong> Well this leads well into the second word, Joshua suggested was interesting spring board &#8211; sensitivity.</p>
<p>On the one hand we can do these things for people who maybe need the augmentation because they have difficulty with one or another sense, e.g.,  their eyes are not functioning, or their ears are not functioning. But on the other hand, we can&#8217;t cross the social bridge to communicate with people who are temporarily disempowered in relation to the rest of society e.g. hobos and people who sleep on the streets of New York City.Â  And even though Augmented Reality could potentially be helpful it can even be more disempowering to the already disempowered.</p>
<p><strong>Bruce Sterling: Right.</strong></p>
<p><strong>Tish Shute:</strong> But re &#8220;sensitivity&#8221; &#8211; does augmentation increase or decrease our sensitivity?  This is a problem that Will Wright brought up [<a href="http://augmentedrealityevent.com/2010/06/14/are-2010-keynote-by-will-wright-brilliant-inspiration-for-the-augmented-reality-community/" target="_blank">see video of Will Wright&#8217;s keynote at are2010</a>], e.g, the problem of parking HUDs getting in the way of your intuitive parallel parking skills.  The Lexus that takes driving control from you when you look back, &#8216;cos it knows that you&#8217;re looking at the road, and it starts to brake. Right?</p>
<p><strong>Bruce Sterling: Right.</strong></p>
<p><strong>Tish Shute:</strong> The fact that the problem with technology is that it makes us less sensitive, right, augmentations sometimes get in our way?</p>
<p><strong>Bruce Sterling:  I suppose that&#8217;s true. But I&#8217;ve heard that said about practically every medium.  Especially television.</strong></p>
<p><strong>Everybody wants to blame machinery for their lack of morality.   It&#8217;s hard to top something like the Kitty Genovese killing in New York. This sort of legendary New York horror story from the 1960s. A woman is stabbed to death in public, no one does anything.</strong></p>
<p><strong>Tish Shute:</strong> Right.</p>
<p><strong>Bruce Sterling: I don&#8217;t think that our media is making us any less humane or more callous.</strong></p>
<p><strong>Tish Shute: </strong>All right. Oh no! I see what you&#8217;re saying. Perhaps I misrepresented what Will was suggesting by putting it that way.  The question is perhaps more how do we get the sensitivity into the technology.  Human bodies are fantastically sensitive and sensory.</p>
<p><strong>Bruce Sterling: Right.</strong></p>
<p><strong>Tish Shute: </strong>And we have these like sensitivities.  For instance, How could augmentations of reality be like a blush ? You definitely want an interaction that&#8217;s not just this data being pushed at you. But what is the data that counts, right?  Will shows a slide often of an iceberg with the tip of the iceberg which is the conscious mind.</p>
<p><strong>Bruce Sterling: Oh, I see.  Yeah.</strong></p>
<p><strong>Tish Shute: </strong> And underneath it is all the preconscious stuff that really counts, right?  Any thoughts on that?</p>
<p><strong>Bruce Sterling:  I did take interest in that.  Will has obviously been spending a lot of time studying cognition.</strong></p>
<p><strong>Tish Shute:</strong> Yes.</p>
<p><strong>Bruce Sterling:  Iâ€™m not convinced that AR has got a lot to do with that.  There is certainly a trend there.  There are a lot of people who want to do body hacks and brain hacks.  I can imagine AR being used for that purpose, but it seems like a niche application.   What is the point of our accessing even more stuff thatâ€™s outside of our consciousness?</strong></p>
<p><strong>Tish Shute:</strong> One of the things he is talking about is game dynamics, is it?  The role of the imagination in play.  For example, he shows the high dynamic range photos that make the world magical.  Something you want to engage with playfully.  This he points out increases a sense of agency because you are encouraged to engage and to play with the world.</p>
<p><strong>Bruce Sterling:  Well, Iâ€™m a literary guy.  Italo Calvino did a lot of writing about this.  He talked about the classics of literature.  Why do we read the classics?  Calvino said we do not read, but reread the classics.  And the reason we do that is that, at first, we read a classic book and we think, â€œBoy, this book is really good.&#8221;   Then, five years later, we read it again and we think, â€œBoy, this is a really good book, and itâ€™s got so much more in it than I thought it had when I was 18.â€  Then we read it again at 28, and itâ€™s like, â€œOK, now I really seem to understand this book, and it means something to me now that I didnâ€™t know when I was 18 and 25.â€</strong></p>
<p><strong>What you are doing through that access is learning something about yourself.  So Will is arguing is what I really need is like a better augmentation.  So that I can go in there and sop up the book all at once.  I can grab every cultural nuance in it, instead of the stuff thatâ€™s  sliding past me because Iâ€™m 18 and kind of young and hasty.  Maybe I could have certain words and phrases helpfully underlined, that are like, â€œOK, well, this part is problematic for you.â€  In some sense, thatâ€™s not allowing me to be 18.</strong></p>
<p><strong>Iâ€™m never going to have the experience of my own maturation against this text, because Iâ€™ve devoured it all in one gulp.</strong></p>
<p><strong>My argument would be that if you want people to be more sensitive toward certain, say, issues and problems, itâ€™s better to find the people who are already sensitive to those issues and problems, and give them a bigger stake in your augmentation system.</strong></p>
<p><strong>Tish Shute:</strong> Yes the social augmented experiences are going to be the most valuable.</p>
<p><strong>Bruce Sterling:  Say that I am really worried about public health.  Well, if I have a lot of nurses that are using my system, people who are aware of my issues, then I could be walking around and Iâ€™ll see a lot more tags saying, â€œThis is where he got food poisoning!â€  &#8220;In this shooting gallery, many people have caught AIDS!â€  Or, you know, â€œTuberculosis has been spotted over here in this building.â€</strong></p>
<p><strong>At that point, I could simply share their knowledge and get some social intelligence.  As opposed to trying to  amp the basements of my little hacker-mind and drag stuff up thatâ€™s escaped my conscious attention.</strong></p>
<p><strong>Tish Shute:</strong> Interesting that seems to bring us to another kind of repetitive theme in AR,  the people tend to pigeon hole it as &#8220;merely&#8221; a visual interface.  But actually, itâ€™s the intersection, isnâ€™t it, of social intelligence and augmentation.</p>
<p><strong>Bruce Sterling:  Well, it depends entirely on how you design the system.  If Iâ€™ve got a military augmented reality, I would expect that to be mostly about urban fighting.  Itâ€™s going to be about kicking in a door and shooting terrorists.   If I pull that helmet off my head and put that on the head of an emergency worker or a cop, Iâ€™m going to get a militarized cop or a militarized emergency worker.</strong></p>
<p><strong>Tish Shute:</strong> Well the histories of the two great mass media of the twentieth century &#8211; TV and the atomic bomb were intertwined, and I suppose the evolution of ubiquitous media, augmented reality and urban warfare is already intertwined too.Â   So how can we encourage augmented realities to move beyond military roots that is common to much technology and into more peaceful urban realities?</p>
<p><strong>Bruce Sterling:  Well,  itâ€™s a matter of deciding whose reality it is that youâ€™re trying to augment.  Iâ€™m not trying to be a bleeding heart about it, but obviously there are people in our society right now with reality that could really use some augmentation.  They are mostly disadvantaged people.  They are vision impaired, or maybe they have autism.  They might be senile and just canâ€™t remember where they put their shoes.  These are people who could really use some help, right?</strong></p>
<p><strong>So, start with people who really need sensory or cognitive help. Before you  turn our geeks into Superman, why donâ€™t you try turning some people who are harmed into more functional individuals?  Then youâ€™ll be able to learn how to do that. Then maybe you can ramp it up to these Nietzschian heights of the superb Man With the X-ray Eyes.  Whatever.</strong></p>
<p><strong>Tish Shute:</strong> Did you notice that a couple of apps actually like <a href="http://www.tagwhat.com/" target="_blank">TagWhat</a> have apps geared towards people with disabilities &#8211; I haven&#8217;t had a chance to check it out.</p>
<p><strong>Bruce Sterling: Iâ€™m sorry, I wasnâ€™t looking at their tags.</strong></p>
<p><strong>Tish Shute:</strong> I was discussing this with Joshua who mentioned <a href="http://www.eyewriter.org/" target="_blank">Zachary Liebermanâ€™s Eye Writer</a>, which is for people with locked-in syndrome. Do you know that?</p>
<p><strong>Bruce Sterling: Sure. And people appreciate that because the poor guy, heâ€™s laid up with Lou Gehrigâ€™s Disease. Now theyâ€™ve given him  a way out.  AR is like a spark of new hope that gives his life meaning. Whatâ€™s wrong with that?</strong></p>
<p><strong>Tish Shute:</strong> Yeah. And <a href="http://www.youtube.com/watch?v=IJ8VMLECToQ" target="_blank">Tim Byrne using Sixth Sense</a> for Autism is interesting.</p>
<p><strong>Bruce Sterling: Letâ€™s consider it the other way. Letâ€™s say this graffiti writer there, instead of him being sick and weak, letâ€™s say heâ€™s an athlete.  So I want to make him into a super-human graffiti writer. I want him to run around graffiti-tagging the entire town before dawn. Is that a good idea? Do we need that? Super human, super taggers? What if heâ€™s going to spray up stencils of  Nietszche?  I kinda wonder whether the game is worth the candle.</strong></p>
<p><strong>Tish Shute: </strong>Yes I suppose it is not a great social scenario to be always augmenting the lives of the elites!  Hmm, the third single word interview question is &#8220;homophily,&#8221; and earlier you were saying that we&#8217;ve kinda got to accept this is very much part of AR &#8211; as how it works, because hyperlocal experiences get created by local communities &#8211; that up to now have tended to be homophilies.</p>
<p><strong>Bruce Sterling: Well, I think thatâ€™s easily handled with some design thinking. You&#8217;ve got to do some user observation and show some sympathy with the user, and to be aware that youâ€™re designing for the user and youâ€™re not designing for yourself.</strong></p>
<p><strong>In a field as young as this, it&#8217;s mostly geeks building cool stuff for geeks. In a lot of ways, it&#8217;s a &#8220;can you top this&#8221; contest. That&#8217;s OK, but it&#8217;s not good design to be your own client all the time. It&#8217;s like writing novels to amuse yourself, or sitting on the porch singing the blues on your own guitar with only yourself to hear.</strong></p>
<p><strong>Tish Shute:</strong> What will it take for AR mature out of this &#8220;geeks building cool stuff for geeks&#8221; phase do you think?</p>
<p><strong>Bruce Sterling: It&#8217;s necessary to master some of the tools first. I think of the way the web has developed over the years. When the World Wide Web first appeared, it was just for physicists, and was all line commands and quite unstable and difficult. Then you got usability studies, and things like Ajax and so forth. It&#8217;s a very painstaking thing.</strong></p>
<p><strong>We&#8217;re not best at building interfaces for the best computer scientists. Web 2.0 was built from things like watching people cry while they were trying to fill out insurance forms. &#8220;Well, why are you so upset?&#8221;</strong></p>
<p><strong>&#8220;Well, I got to the end of the webpage, and then it said I took too long, and it cut me off and now I have to start all over!&#8221; <a href="http://blog.jjg.net/" target="_blank">Jesse James Garrett</a>, right? Benefactor of mankind.</strong></p>
<p><strong>If you&#8217;re experienced, you think: &#8220;Why don&#8217;t I build a little module here, and kind of move the form over here, then I&#8217;ll periodically update it with some asynchronous Java and XTML.&#8221; And people are like, &#8220;Gee, how odd.&#8221; But that really works for real people. It comes from studying what people want to do. Whereas, the current AR approach to a problem like the insurance form would be like, &#8220;I will give you the ability to record the entire insurance form, and it will flash before your eyes!&#8221; OK great, that&#8217;s a cool hack, but I don&#8217;t really need X-Ray Eyes to fill out my insurance form. What I need is a more user friendly interface.</strong></p>
<p><strong>Tish Shute:</strong> Well it seems like we are moving into the terrain of Joshua&#8217;s fifth word &#8220;ventilation,&#8221; &#8211; if I understand it rightly &#8211; it is at least partially the antidote to territorialization because itâ€™s this idea that a place needs air so we come out of our hermetically sealed boxes of the way we relate to a place and what kind of augmentation would bring more oxygen to that space.</p>
<p>There was an interesting moment in the Auggies because when <a href="http://twitter.com/dutchcowboy" target="_blank">Maarten Lens-FitzGerald</a> presented the guerrilla shopping Layar and basically Mark Billinghurst and Jessie Schell who spoke first didn&#8217;t seem too impressed. They didnâ€™t want to walk to shopping &#8211; that was what web shopping did, it saved us from walking to shop&#8230; but I felt, to me you picked up on something which might have some bearing on &#8220;ventilation&#8221; in that this AR shopping Layar was kind of squatting Prada &#8211; a favela chic AR shopping thing?</p>
<p><strong>Bruce Sterling: I wasnâ€™t sure if I was interpreting what Maarten had in mind by that.  But I think Maarten sees his structure accurately as an experience thing rather than a mapping thing. I think heâ€™s proudest of things like the Berlin Wall app on Layar, as opposed to Layars that help you go get a hamburger. Itâ€™s like&#8230;so when Layar inserts parasitic augmented shopping over other peopleâ€™s  real shopping? That was rather a subversive thing.</strong></p>
<p><strong>I think the key there is that his client is called &#8220;Hostage T-shirts,&#8221; right? I mean itâ€™s actually kind of a transgressive little hippy T-shirt store that Layar can dump anywhere in the world. Layered right over, say, Versace and Prada.  I donâ€™t know what becomes of that effort. And Iâ€™m not sure about the term â€œventilation,â€ because thatâ€™s a term of art I havenâ€™t heard much.</strong></p>
<p><strong>Tish Shute:</strong> Maybe it&#8217;s like in a cafe.  Ventilation would mean we were able to communicate with all these different categories of people that we normally would be unable to connect to, even though we might be sitting only a few feet apart.</p>
<p><strong>Bruce Sterling:   So it means ventilation in the bottles of our homophilies.</strong></p>
<p><strong>Thatâ€™s not a personal problem for me.  I commonly live in foreign cities and, you know, and spend a helluva lot of time talking to strangers at conferences. So I donâ€™t think Iâ€™d have that particular tight little social island problem.</strong></p>
<p><strong>Tish Shute:</strong> Of the three judges at the Auggies, you seemed most enthusiastic about the Layar entry.</p>
<p><strong>Bruce Sterling: It may be theyâ€™re not as familiar with the business models of locative AR as I am, and as Maarten is. It was kind of a subtle in-joke he was making about Layarâ€™s own business model there.</strong></p>
<p><strong>Tish Shute: </strong>How do you explain that?</p>
<p><strong>Bruce Sterling: Well, you know, Layar&#8217;s in the business of  selling software to make mapping and urban structures into ecommerce.</strong></p>
<p><strong>The ideal way to do that obviously would be to move the richest customers into the most expensive shops in the most rapid way possible. Or at least distribute them in the directions they want to go, a la Google. Whereas this app that Maarten was talking about puts big barnacles in the way that are selling punk t-shirts.</strong></p>
<p><strong>Tish Shute:</strong> Right! Right!</p>
<p><strong>Bruce Sterling:   The Dutch are a bit subtle in their humor.  I rather imagine thereâ€™s a lot of discussion in Layarâ€™s inner circle about exactly what they want developers to do with their platform. Theyâ€™re going to have considerable political difficulty deciding who can have a Layar key and how you discipline people when they start doing weird stuff. &#8220;The Oakland Medical Marijuana layar.&#8221;</strong></p>
<p><strong>Tish Shute:</strong> Well, finding nudists is one of the top layars at the moment.</p>
<p><strong>Bruce Sterling: You know, obviously so. And finding narcotics in Amsterdam, or a prostitution layer.  I warned them nine months ago this was bound to happen. Iâ€™m sure theyâ€™re aware of it.  I don&#8217;t think Layar wants Googleâ€™s style of cool, technocratic detachment.</strong></p>
<p><strong>Tish Shute:</strong> But thatâ€™s pretty difficult to do in current augmented reality because we donâ€™t have all the mathematical voodoo for full on AR search yet, do we?</p>
<p><strong>Bruce Sterling: Well, you can hire it out. Somebody&#8217;s going to do it, if they get interested enough. There&#8217;s Nokia-Yahoo. Nokia-Yahoo! just did a big corporate deal&#8230;involving Nokia&#8217;s mapping system and Yahoo&#8217;s localization. So the Nokia-Yahoo! mash-up is called Nooo! Or could be called Yahno. Yakia! Unfortunately ridiculous names.</strong></p>
<p><strong>Tish Shute:</strong> Itâ€™s interesting because you mentioned the spidersâ€™ mating problem at Google. Theyâ€™ve got all the pieces to make this kind of level of AR obviously right now. But they actually havenâ€™t done it yet.</p>
<p><strong>Bruce Sterling: There must be at least some discussion in Google, but the same goes for Microsoft. Iâ€™m frankly baffled by Microsoft, because itâ€™s just full of insanely brilliant people. What the hell are they doing in there? Name one serious innovation thatâ€™s come out of their labs in five years. They make Integral Research look dynamic. Itâ€™s really kind of sad.</strong></p>
<p><strong>Tish Shute:</strong> Itâ€™s a very curious situation with AR though, because AR more than any new technology relies on these big hordes of data particularly for the mapping, right? And only the big four have the data &#8211; although we are beginning to see upstarts, Earth Mine, Simple Geo&#8230; Did you get a chance to meet Di-Ann Eisnor  from <a href="http://www.waze.com/homepage/" target="_blank">Waze &#8211; real-time maps and traffic information based on the wisdom of the crowd</a>.Â  Waze is a very interesting project that is a potential giant killer.</p>
<p><strong>Bruce Sterling: No, I didnâ€™t talk to them.  Iâ€™ve seen people speculate that Earthmine and Apple are going to make an allegiance. I guess if youâ€™re thinking that urban informatic mapping is a super big thing for AR, that must be true.   But Iâ€™m not convinced thatâ€™s necessarily the case. People have pointed out that you can just use Google Maps, and you donâ€™t have to walk around with a little visor.  There are other aspects of AR besides the cell phone space. Thereâ€™s Total Immersion&#8217;s big display screens. Thereâ€™s the web-based fiduciary stuff. And thereâ€™s projection mapping. And then thereâ€™s experience design just for people who need their reality augmented for whatever personal or social reason. [dog barking]</strong></p>
<p><strong>Tish Shute:</strong> Right. Oh, Iâ€™m in the middleâ€¦ My sonâ€™s come. What a good hair cut!</p>
<p><strong>Bruce Sterling: Hi, there.</strong></p>
<p><strong>Tishâ€™s Son</strong>: Hi.</p>
<p><strong>Bruce Sterling: Howâ€™s it going, sir? Good to see youâ€¦</strong></p>
<p><strong>Tishâ€™s Son:</strong> Good.</p>
<p><strong>Tish:</strong> [laughs]</p>
<p><strong>Bruce Sterling: Yeah. Nice looking shirt. I like the back of it.</strong></p>
<p><strong>Tish Shute:</strong> Thatâ€™s from the American Shaolin Temple. [laughs<strong>]</strong></p>
<p><strong>Bruce Sterling: All right. Awesome. Kung Fu geek shirt.</strong></p>
<p><strong>Tish Shute:</strong> Yup he is a bit of Kung Fu Geek. He and his dad did an iPhone app on it for Yu-Gi-Oh, for Yu-Gi-Oh scoring.</p>
<p><strong>Bruce Sterling: Awesome. Plenty of Pokémon-style combat in Yu-Gi-Oh.</strong></p>
<p><strong>Tish Shute:</strong> Yeah. Well, it’s interesting because you’ve talked about this aspect. That all of this, the Pokémon aspect of AR hasn’t kicked in yet. But it’s obviously a match made in heaven to some degree, isn’t it?</p>
<p><strong>Bruce Sterling: One would think so, yeah.  The whole little kid gaming thing. What does that have to do with Google or Bing? You donâ€™t need a massive database for stuff like that.</strong></p>
<p><strong>Tish Shute: </strong>Yeah, youâ€™re right. But good tracking, mapping and registration requires a lot of mapping&#8230;</p>
<p><strong>Bruce Sterling: Well, our current tracking, mapping and registration requires that. Maybe thereâ€™s some other way to hack it that we donâ€™t know about yet.</strong></p>
<p><strong>Tish Shute: </strong>That’s a very interesting point. We always have to stretch the way we think about mapping… perhaps it’s a real-time understanding of the location you’re in&#8230;perhaps the map is being negotiated through several social processes?</p>
<p><strong>Bruce Sterling: There are maps, and then there are maps. There’s a kind of artillery map where you need to know the precise location of target spaces. And then there’s the kind of social map where I’m really looking for the IN-N-OUT Burger where my sister went last Tuesday. That’s a different  system.</strong></p>
<p><strong>Tish Shute:</strong> And I think AR, at the moment, weâ€™re getting the most out of the social maps certainly. And the other [machine   perception  technologies to detect  the identity and physical    configuration of  objects relative to each  other to accurately  project   information  alongside/overlaid with a physical object] is still kind of the big dream, isnâ€™t it?</p>
<p><strong>Bruce Sterling: They say that men never ask for directions and women never read maps. Clearly, the genders have different ways of navigating the world. Who’s to say what manner of augmenting our experiences is hottest?  I’m not convinced that today’s rather rigid geolocativity is really what our society wants from that particular service. Maybe what we want is something more folksy.   Some useful nudge in the right direction as opposed to grids with 200 meters here and instructions to turn such-and-such.</strong></p>
<p><strong>Besides, thereâ€™s other hacks we havenâ€™t considered.  Weâ€™re very dependent on GPS, but just suppose all those satellites are blown out of the sky in a solar storm. Would we really want to give up mapping? Wouldnâ€™t we just come up with some other nifty hack?  Radio beacons, letâ€™s just say. Atomic clock timers in towns. Or maybe just little QR codes on lampposts that give you the exact location of that lamppost, and just click the thing and have it calculate where you are.</strong></p>
<p><strong>Tish Shute:</strong> Yes the <a href="http://thenexthope.org/" target="_blank">NextHope</a> <a href="http://thenexthope.org/2010/07/hackable-badge-accessory-kits-available/" target="_blank">OpenAMD project</a> had a clever way of triangulating location indoors.</p>
<p><strong>Bruce Sterling: Well, GPS is there and people all want to use it. Itâ€™s got good API, so of course you want to. And the guys who are good at doing it are real geolocative freaks. But the mere fact that we can do it this way, and that you can make it pay, doesnâ€™t mean that itâ€™s the ultimate way to provide that service to people.  Itâ€™s like saying that Egyptian hieroglyphics must be the greatest way to write,  because weâ€™ve got a lot of them and theyâ€™re hard to learn. What if somebody comes along with an alphabet? Itâ€™s going to be a little embarrassing.</strong></p>
<p><strong>Tish Shute:</strong> Yeah, thatâ€™s a very good point. Now, this is a more simple ordinary question about the event. <a href="http://www.ydreams.com/#/en/homepage/" target="_blank">YDreams</a> went off the map in the Auggie voting, and walked away with The Auggies. No one doubted that that was the mostâ€¦</p>
<p><strong>Bruce Sterling: I donâ€™t know. I thought those <a href="http://occipital.com/blog/" target="_blank">Occipital</a> guys with the panoramic painting&#8230;. That was hairy. I would have been tempted to give them the prize myself, actually.</strong></p>
<p><strong>Tish Shute:</strong> And what did you like best about that? Because I agree. I love <strong><a href="http://occipital.com/blog/" target="_blank">Occipital</a></strong>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/Screen-shot-2010-09-17-at-6.20.58-PM.png"><img class="alignnone size-medium wp-image-5704" title="Screen shot 2010-09-17 at 6.20.58 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/09/Screen-shot-2010-09-17-at-6.20.58-PM-300x41.png" alt="Screen shot 2010-09-17 at 6.20.58 PM" width="300" height="41" /></a></p>
<p><em>click to enlarge</em></p>
<p><strong>Bruce Sterling: I thought it was a more technically difficult stunt than the hand registration thing.  Using a hand as a 3-D cursor is hot, but  not like painting a panorama in 3-D in real time.  That was an impressive technical feat.</strong></p>
<p><strong>Tish Shute: </strong>And they hinted at the 2.1.1 AR, more AR version of that. What do you see coming out of that as possibilities?</p>
<p><strong>Bruce Sterling: Well, I’d heard of <a href="http://www.ydreams.com/#/en/homepage/" target="_blank">YDreams</a>, so I wasn’t stunned. But I’d never heard of those guys. I wonder what else the heck they’ve got in the attic.</strong></p>
<p><strong>Tish Shute:</strong> very cool stuff&#8230;</p>
<p><strong>Bruce Sterling: Well, more power to them. But clearly YDreams was the popular favorite. And who couldnâ€™t like it? It was just so AR.</strong></p>
<p><strong>Tish Shute</strong>: It was so AR and so gorgeous.</p>
<p><strong>Bruce Sterling: It was pretty, actually.Â  Except for their ugly menu button and poor font choice.</strong></p>
<p><strong>Tish Shute:</strong> Oh, yes. You didnâ€™t like that, did you? [laughs] But with the Occipital panorama, what do you see the next stage of that?</p>
<p><strong>Bruce Sterling: Well, obviously quicker and faster. Quicker and faster and more accurate in a network. Letâ€™s just say Iâ€™m in New York and youâ€™re in New York and Iâ€™m calling you for help. And you say where are you?  I just whirl around like this and I mail it to you on a Google Wave. And you whirl around like that, and then we compare the two panoramas and do an instant triangulation. And you say: Iâ€™m over here on this red dot of your screen.</strong></p>
<p><strong>Tish Shute: </strong>Yeah, exactly.</p>
<p><strong>Bruce Sterling:  Weâ€™re navigating with panoramas by having two connected panoramas and considering the difference.</strong></p>
<p><strong>Tish Shute: </strong> Yeah, very interesting&#8230;</p>
<p><strong>Bruce Sterling: Not shabby, right?</strong></p>
<p><strong>Tish Shute:</strong> Not shabby at all.</p>
<p><strong>Bruce Sterling: If you could do it in real time.</strong></p>
<p><strong>Tish Shute:</strong> Then the other thing I missed because I was going to meet Will was I missed the Launch Pad competition. Did you catch that?</p>
<p><strong>Bruce Sterling: I didnâ€™t see it either. I thought of another app though.</strong></p>
<p><strong>Tish Shute:</strong> Oh!</p>
<p><strong>Bruce Sterling: Youâ€™ve got a panorama maker in your home office, and it just scans the office 24 hours 365 and tags anything that moves, right? OK, whereâ€™s the clipboard?Â  At 8:15 it was over here.  Now itâ€™s vanished. Now another object is viewed over here. So, logically, ping, you hit it with a sticky light and there it is, right?</strong></p>
<p><strong>Tish Shute:</strong> Oh,  that&#8217;s cool also knowing what has changed in any environment would be a big enabler for a lot of AR visions.</p>
<p><strong>Bruce Sterling:  Iâ€™m sure there are many other things you could do with panoramas.</strong></p>
<p><strong>Tish Shute:</strong> My jet lag is beginning to kick in big time &#8211; so many ideas to pursue from are2010 &#8211; those panoramas are very exciting though.</p>
<p><strong>Bruce Sterling: Oh, well, itâ€™s all right.  We can augment reality!   Iâ€™ve got three heads and six hands!</strong></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2010/09/17/urban-augmented-realities-and-social-augmentations-that-matter-interview-with-bruce-sterling-part-2/feed/</wfw:commentRss>
		<slash:comments>8</slash:comments>
		</item>
		<item>
		<title>The Next Wave of AR: Exploring Social Augmented Experiences at Where 2.0</title>
		<link>http://www.ugotrade.com/2010/03/29/the-next-wave-of-ar-exploring-social-augmented-experiences-at-where-2-0/</link>
		<comments>http://www.ugotrade.com/2010/03/29/the-next-wave-of-ar-exploring-social-augmented-experiences-at-where-2-0/#comments</comments>
		<pubDate>Mon, 29 Mar 2010 05:25:03 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[Anselm Hook]]></category>
		<category><![CDATA[AR Blip]]></category>
		<category><![CDATA[AR browsers]]></category>
		<category><![CDATA[ARWave]]></category>
		<category><![CDATA[ARWave demo]]></category>
		<category><![CDATA[atemorality]]></category>
		<category><![CDATA[atemporal network culture]]></category>
		<category><![CDATA[augmented reality and federation]]></category>
		<category><![CDATA[augmented reality event]]></category>
		<category><![CDATA[augmented reality search]]></category>
		<category><![CDATA[augmenting the map as interface]]></category>
		<category><![CDATA[Brady Forrest]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[collaborative augmented reality]]></category>
		<category><![CDATA[Davide Carnovale]]></category>
		<category><![CDATA[Dennou Coil]]></category>
		<category><![CDATA[design principles for social augmented experiences]]></category>
		<category><![CDATA[FourSquare]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[gowalla]]></category>
		<category><![CDATA[Jeremy Hight]]></category>
		<category><![CDATA[Jesse Schell]]></category>
		<category><![CDATA[Joe Lamantia]]></category>
		<category><![CDATA[layers and channels of augmentation]]></category>
		<category><![CDATA[location technologies]]></category>
		<category><![CDATA[locative media]]></category>
		<category><![CDATA[locative narratives]]></category>
		<category><![CDATA[Markus Strickler]]></category>
		<category><![CDATA[narrative archaeology]]></category>
		<category><![CDATA[open augmented reality]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[pygowave]]></category>
		<category><![CDATA[real time social augmented experiences]]></category>
		<category><![CDATA[Ruby On Sails]]></category>
		<category><![CDATA[social AR]]></category>
		<category><![CDATA[social AR and crisis response]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[Sophia Parafina]]></category>
		<category><![CDATA[Thomas Wrobel]]></category>
		<category><![CDATA[Wave]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>
		<category><![CDATA[Where2.0]]></category>
		<category><![CDATA[WhereCamp]]></category>
		<category><![CDATA[Will Wright]]></category>
		<category><![CDATA[writing within the map]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=5332</guid>
		<description><![CDATA[Where 2.0 is going to be epic this year (see my interview with Brady Forrest here), and it is so exciting to be part of it. Location technologies and augmented reality are anointed rulers now. Time Magazine recognized augmented reality as one of its 10 Tech Trends for 2010 (for more see ReadWriteWeb). The photo [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/jeremyandlisahight.jpg"><img class="alignnone size-medium wp-image-5336" title="jeremyandlisahight" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/jeremyandlisahight-300x160.jpg" alt="jeremyandlisahight" width="300" height="160" /></a></p>
<p><a id="jqit" title="Where 2.0" href="http://en.oreilly.com/where2010">Where  2.0</a> is going to be epic this year (see <a id="ysmn" title="my interview with Brady Forrest here" href="../../2010/02/10/the-physical-world-becomes-a-software-construct-talking-with-brady-forrest-about-where-2-0-2010/">my interview  with Brady Forrest here</a>), and it is so exciting to be part of it.  Location technologies and augmented reality are anointed rulers now.  <a href="http://www.time.com/time/specials/packages/article/0,28804,1973759_1973760_1973797,00.html">Time  Magazine recognized</a> augmented reality as one of its 10 Tech Trends  for 2010 (for more <a href="http://www.readwriteweb.com/archives/augmented_reality_among_times_10_tech_trends_2010.php" target="_blank">see ReadWriteWeb</a>).</p>
<p>The  photo above is by Jeremy and Lisa Hight.  <a id="ohzg" title="Jeremy Hight" href="http://34n118w.net/">Jeremy Hight</a> is an information  designer, theorist and artist working in Augmented Reality and Locative  Media.   His essay “Narrative Archaeology” was named one of the 4  primary texts in Locative Media.</p>
<p><a id="xel:" title="Jeremy Hight" href="http://en.oreilly.com/where2010/public/schedule/speaker/69399">Jeremy Hight</a> will be part of our  panel: <a title="The Next Wave of AR: Exploring Social Augmented Experiences" href="http://en.oreilly.com/where2010/public/schedule/detail/11046">The  Next Wave of AR: Exploring Social Augmented Experiences</a>, with <a id="b49q" title="Anselm Hook" href="http://en.oreilly.com/where2010/public/schedule/speaker/6545">Anselm Hook</a>, <a id="h3j-" title="Joe Lamantia" href="http://en.oreilly.com/where2010/public/schedule/speaker/26367">Joe Lamantia</a>, <a id="xtfk" title="Sophia Parafina" href="http://en.oreilly.com/where2010/public/schedule/speaker/59688">Sophia Parafina</a> and <a id="uw9f" title="myself." href="http://en.oreilly.com/where2010/public/schedule/speaker/38011">myself.</a> We will <a href="http://www.youtube.com/watch?v=ZjXCTCSKtRQ" target="_blank">debut the video of the  ARWave project demo </a>that brings together augmented reality,  geolocation, and wave federation (more details later in this post).Â  And, Jeremy will bring to our  presentation some augmentations on his recent brilliant work and paper, <a href="http://www.neme.org/main/1111/writing-within-the-map" target="_blank">â€œWriting Within the Map.â€</a></p>
<p>Greg  J. Smithâ€™s points out in <a href="http://serialconsign.com/2010/03/thoughts-writing-within-map#comments" target="_blank">his in depth look at Jeremyâ€™s work</a> that it, <strong>â€œdovetails  with some of the main points in Bruce Sterlingâ€™s recent <a href="http://www.wired.com/beyond_the_beyond/2010/02/atemporality-for-the-creative-artist/">atemporality  keynote</a> at Transmedialeâ€ â€“ </strong>fortunately there is a <a href="http://www.wired.com/beyond_the_beyond/2010/02/atemporality-for-the-creative-artist/" target="_blank">transcription of Bruceâ€™s keynote here</a>.Â  What is so  awesome about this dovetailing is that you can get a feel for the  fun part of living in an, â€œatemporal network culture.â€Â  And, if you want  to really understand just how much locative media and augmented reality  have changed us, youÂ  might want to dig into these texts.</p>
<p>Bruce  Sterling and Jeremy Hight, and members of the ARWave team, and a  superb cast of augmented reality movers and shakers &#8211; including Will  Wright and Jesse Schell, will be <a id="ncnl" title="speaking at Augmented Reality Event in Santa Clara, June 2nd and  3rd." href="http://augmentedrealityevent.com/speakers/">speaking at Augmented Reality Event in Santa Clara, June 2nd and  3rd.</a></p>
<p>But, this week, the AR community&#8217;s attention  will be on the events at Where 2.0.Â Â  The  keynote speakers will be streamed live, so if you are not fortunate  enough to be there, tune in!</p>
<h3>The Next Wave of AR: Exploring Social Augmented Experiences</h3>
<p>On our panel, Jeremy  Hight, Anselm Hook, Sophia Parafina, Joe Lamantia and I will cover some  of the key social, cultural, technical and interactional questions for  exploring social augmented experiences. There will be five lightning  presentations, and an opportunity for questions from the audience, and a  world premier of the ARWave demo!</p>
<p><strong>1)  â€œAugmenting the map as interface: AR and Locative Narrativesâ€ -</strong> Jeremy Hight<strong><br />
</strong></p>
<p><strong>*Map augmentation of the historic route 66  can house an essay contest and publication globally but as embedded  within that map augmentation instead of books or even web sites.</strong></p>
<p><strong>*  A place on a map can be a graphic index and database to save and  collect<br />
the writing of that place with a graphic or textual search  index.</strong></p>
<p><strong>*One can pop immersive visualizations of abandoned or lost  buildings from map location in shared software and collectively augment  (imagine channels within the lost core of detroit where one is memories  and accounts tagged within parts in the immersive visualization while  another is of poems and stories written by people moved by the place and  its semiotics and story).</strong></p>
<p><strong>*The news stand is to be the map.</strong></p>
<p><strong>*New  forms of literature will be born of mapping, spaces,augmentation and<br />
new tools</strong></p>
<p>The concept drawings below (click to  enlarge)Â are  a collaboration between Jeremy Hight and Paul Wehby, Senior Designer at  <a href="http://www.lacma.org/" target="_blank">LA County Museum of Art.</a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby1post.jpg"><img class="alignnone size-thumbnail wp-image-5342" title="wehby1post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby1post-150x150.jpg" alt="wehby1post" width="150" height="150" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby2post.jpg"><img class="alignnone size-thumbnail wp-image-5343" title="wehby2post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby2post-150x150.jpg" alt="wehby2post" width="150" height="150" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby3post.jpg"><img class="alignnone size-thumbnail wp-image-5352" title="wehby3post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby3post-150x150.jpg" alt="wehby3post" width="150" height="150" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby4post.jpg"><img class="alignnone size-thumbnail  wp-image-5353" title="wehby4post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/wehby4post-150x150.jpg" alt="wehby4post" width="150" height="150" /></a></p>
<p><strong>2) </strong>Anselm Hook will look at, <strong>&#8220;10 reasons why AR isn&#8217;t a  flash in the pan,&#8221; </strong>and how,<strong> â€œAR can help us see the world we  would like to have exist.â€</strong></p>
<p>Anselm notes, <strong>â€œSo  much of what we do is so fickle and Iâ€™m looking for ways to connect  digital media work to deep values.â€</strong></p>
<p><strong>3)</strong> Sophia Parafina will present on, <strong>â€œSocial AR and Crisis Responseâ€</strong></p>
<p><strong>â€œAugmented  reality as a multi-party conversation. Â Rather than being passive  viewers of AR with a limited ability to Â checkin to places and make  annotations, current devices can broadcast sensor information that can  be fused into an interactive stream. AR users can send and receive  information, location, and sensor data from their mobile device.Â  The  streams can be federated into a unique AR view composed by the user.</strong></p>
<p><strong>Entertainment  and gaming are obvious applications, but it can also be applied to  crisis situations such as the search and rescue operations in Haiti.  Â Efforts such as Mission 4636, the SMS translation service, could  benefit from AR views. Â The collaboration among the Mission 4636  volunteers was the key element Â in their success for providing location  and rapid translation to responders on the ground.</strong></p>
<p><strong>With an AR  view, responders can send back their sensor information from their  mobiles to provide contextual information to remote volunteers. Â This  extends the conversation between remote volunteers and on the ground  responders and fosters collaboration which was a key element for the  success of Mission 4636â€³</strong></p>
<p><strong>4)</strong> Joe Lamantia,  an experience design and strategy consultant helping to define the  interaction framework and scenarios behind ARWave, will discuss, <strong>â€œDesign  Principles For Social Augmented Experiences:â€</strong></p>
<p><strong>â€œWith  the exotic mixed realities envisioned by futurists and science fiction  writers seemingly around the corner, it is time to move beyond questions  of technical feasibility to consider the value and impact of turning  reality inside out for everyday social settings and experiences. Thanks  to the inherently social nature of augmented reality, we can be sure the  value and impact of many augmented experiences depends in large part on  how effectively they integrate with the social dimensions of real-world  settings, in real time.&#8221;</strong></p>
<p>Joe will share, <strong>&#8220;eight guiding  principles for designing experiences that engage naturally with the  social dimension, and increase the value of augmented experiences.&#8221; </strong></p>
<p><strong>5) <a id="y08e" title="AR Wave" href="http://groups.google.com/group/arwave">&#8220;ARWave</a> &#8211; A demo and state of play,&#8221; </strong>from Tish Shute</p>
<p>I  will have the awesome privilege, on our Where 2.0 panel, of showcasing <a id="y08e" title="AR Wave" href="http://groups.google.com/group/arwave">ARWave</a>.Â Â  We willÂ   premier the ARWave demo which shows how ARWave has accomplished the  basics of geolocating data on Wave Federation Protocol (and real time  collaboration on this geolocated data).Â  <span id="ejpu" dir="ltr">If  you&#8217;re interested in the ARWave project join the <a id="n4k6" title="Mailing  list" href="http://groups.google.com/group/arwave">Mailing list</a>, FAQ are <a id="medt" title="here" href="http://lostagain.nl/websiteIndex/projects/Arn/information.html">here</a>, and have a peek at the current state of  development at <a id="ius-" title="Google Code" href="http://code.google.com/p/arwave/">Google Code</a>, and the <a id="dj:p" title="specification for an AR Blip" href="http://arwave.wiki.zoho.com/ARBlip-Specification.html">specification for an AR Blip</a>.Â   We also have Waves for the project hosted on Google Wave.Â  You can  join the general discussion <a id="xiwt" title="here" href="https://wave.google.com/wave/#restored:wave:googlewave.com%21w%252BJAcNzz16A">here</a>, and the technical side <a id="s393" title="here" href="https://wave.google.com/wave/#restored:wave:googlewave.com%21w%252Bhvk2Fj3wB">here</a>.</span></p>
<p>The picture below is a  screen shot from the demo video produced by core AR Wave developer and  concept designer, Thomas Wrobel.</p>
<p>Click on the  image to enlarge, and note: <strong>“The pink thing is from Dennou Coil. It’s  an anti-virus program (that literally chases down bugs and glitches and  removes them).”</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-27-at-6.58.55-PM.png"><img class="alignnone size-medium wp-image-5344" title="Screen shot 2010-03-27 at 6.58.55 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-27-at-6.58.55-PM-281x300.png" alt="Screen shot 2010-03-27 at 6.58.55 PM" width="281" height="300" /></a></p>
<h3>ARWave</h3>
<p>In ARWave, stories or art are tied to place. And as Jeremy Hight  writes:</p>
<p><strong>â€œThe possibility exists to take a part of an  area and overlay a dystopia, a utopia, multiples of each of these, or  even recreations of previous incarnations in the past. Writing and  publication thus cannot only be of place, and form(s), but of selected  augmentations of icons, streets, buildings and related texts on top of  the map. These spaces can be built in real time and can be turned on and  off as channels of augmentation that over time illustrate many faces of  place in its present, past, possible futures,etc. with texts within  these alternate spaces as commentary, as fused aesthetic analysis, or  simply creative writing relevant to these charged and hybrid spaces.â€</strong></p>
<p>As  Thomas notes, Jeremy Hightâ€™s,Â  <strong>â€œidea of channels ties into the concept  of waves = a layer, and people can have many layers on at once.â€</strong></p>
<p>This  is different from the <a href="http://layar.com/" target="_blank">Layar</a> concept of a layer or rather â€œlayar.â€</p>
<p><strong>&#8220;We  are not talking about layers in the classical map layer way of  thinking, where you have a layer of all restaurants or a layer of all  mountain peaks, etc.,&#8221; </strong>notes ARWave developer Markus Strickler.</p>
<p>Currently all geo location apps like Layar have to use their own  servers, so users have to use different clients with different log ins  to see data from different sources.Â  But because ARWave uses federation,  we don&#8217;t depend on centralized infrastructure where the client of one  company can only connect to the server of that company.Â  This opens up  many exciting new possibilities for how people can decide to view and  publish geolocated data.</p>
<p>With AR Wave, via one  login, people can access the whole distributed network of servers (see  diagrams below), and any content will be accessible to them. ARWave will  make it easy for individuals, not just developers, to layer their  environment â€“ allowing the creation of augmented reality content to be  as simple as contributing to a Wave.</p>
<p><strong>â€œARWave  will enable individuals to publish easily to everyoneâ€¦.or just a few  people,â€</strong> Thomas notes:</p>
<p><strong>â€œTo â€˜publishâ€™ is also  self publication and distribution in communities or like minded groups  without the hard read of publication or rejection.â€ = publishing on a  Wave. No one approves it, anyone can publish to communities, or their  friends and family. Or even just personal publishing it for their own  reference.â€</strong></p>
<p>But ARWave does not compete with  existing AR Browsers.Â Â  On the contrary, AR browsers like Layar,  Wikitude and others, could implement ARWave and use it to enhance their  applications.</p>
<p><strong>â€œ<a href="http://layar.com/" target="_blank">Layar</a></strong><strong> has a killer  browser already,Â  ARWave would add social features. They can keep their  â€œwalled gardenâ€ of data and still join the federation of open data too <img src="../wp-includes/images/smilies/icon_smile.gif" alt=":)" /> â€ (Thomas Wrobel)</strong></p>
<p>Yup, that is the cool  part of federation â€“ you can have your cake and eat it too!</p>
<p>Sophia  Parafina and I will be organizing a discussion session on ARWave and  Federation at <a href="http://upcoming.yahoo.com/event/4909659/CA/Mountain-View/WhereCamp-SF/Google-Maxwell-Tech-Talk/CA/Mountain-View/WhereCamp-SF-2010/Google-Maxwell-Tech-Talk/" target="_blank">WhereCamp</a>, right after Where 2.0, April 3rd and 4th, and<a href="http://twitter.com/dlpeters" target="_blank"> Dan Peterson</a> who is in leading the  federation effort for Google Wave will join us.</p>
<p>The  diagrams below illustrate how ARWave and federation can revolutionize  the way we share our augmented realities.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-27-at-6.06.33-PM.png"><img class="alignnone size-medium wp-image-5347" title="Screen shot 2010-03-27 at 6.06.33 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-27-at-6.06.33-PM-300x218.png" alt="Screen shot 2010-03-27 at 6.06.33 PM" width="300" height="218" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-27-at-6.06.00-PM.png"><img class="alignnone size-medium wp-image-5345" title="Screen shot 2010-03-27 at 6.06.00 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-27-at-6.06.00-PM-300x214.png" alt="Screen shot 2010-03-27 at 6.06.00 PM" width="300" height="214" /></a></p>
<h3><strong>Real Time Social Augmented Experiences</strong></h3>
<p>Another key  aspect of ARWave is itâ€™s near to real time update capabilities.Â  As Jeff  Pulver pointed out in, â€œ<a href="http://pulverblog.pulver.com/archives/009156.html" target="_blank"><strong>SXSW  2010: The days twitter became less relevant:â€</strong></a></p>
<p><a href="http://pulverblog.pulver.com/archives/009156.html" target="_blank"><strong> </strong></a><strong>â€œAt  <a href="http://click.bsftransmit1.com/ClickThru.aspx?pubids=6954%7c149%7c09546&amp;digest=j9iIm6%2b67%2fKjaKaD%2bG459g" target="_blank">South By Southwest</a> 2010 (SXSW), a strange thing  happened on the way to Austin. A community of twitter faithful shifted  from sharing everything about everything on only twitter (and maybe  Facebook) and changed their habits to rely on learning about what was  happening and where things were happening by using <a href="http://click.bsftransmit1.com/ClickThru.aspx?pubids=6954%7c140%7c09546&amp;digest=vh5VR%2fg1W2H2FHKwRIGl8g" target="_blank">foursquare</a> and <a href="http://click.bsftransmit1.com/ClickThru.aspx?pubids=6954%7c141%7c09546&amp;digest=SyK27R5EP7LzBWYvodNDpQ" target="_blank">Gowalla</a> instead. Iâ€™m sure there were other products  and platforms being used including <a href="http://click.bsftransmit1.com/ClickThru.aspx?pubids=6954%7c142%7c09546&amp;digest=Nd55%2flEGjFr3lopcn8%2fqiA" target="_blank">Loopt</a> and <a href="http://click.bsftransmit1.com/ClickThru.aspx?pubids=6954%7c143%7c09546&amp;digest=rJYwQX8VJw9Bww36xQ1Lbg" target="_blank">GySPii</a> but foursquare and Gowalla were the dominant  platforms.â€<br />
</strong></p>
<p>Later Jeff wrote:</p>
<p><strong>&#8220;There were times where I could feel the ebbs and the flows of the people move as different people checked into various locations. While most of this was felt locally in the place I was in, it also became apparent on the platforms when hundreds of people would rush to check in to a location. There were also times when it felt like I was chasing ghosts; these were the times I would go to a spot because a friend had checked into that spot only to discover they were no longer there.&#8221;</strong></p>
<p>ARWave&#8217;s realtime collaborative capabilities are going to introduce some fascinating dynamics to &#8220;chasing ghosts,&#8221; as the ARWave framework gets integrated into services like foursquare &#8211; a project we have already begun to look at.</p>
<h3><strong>Augmented Reality  Search</strong></h3>
<p>As I mention<a href="../../2010/03/18/visual-search-augmented-reality-and-physical-hyperlinks-for-playfulness-not-just-purchases-talking-with-paige-saez-about-imagewiki/" target="_blank"> in my previous post</a>, ARWave presents some fascinating possibilities for AR Search. For example, one might do advanced searching within waves using SPARQL, which could then display in the form of a personal blip in your viewpoint (which in turn could be shared with others). Linked data will be massively important in filtering and delivering useful info for augmented views (<a href="../../2010/03/03/the-game-is-about-the-world-not-dragons-talking-with-will-wright/" target="_blank">see my conversation with Will Wright </a>about the problem of augmented reality overriding our very smart instincts and not being useless or worse as a result).</p>
<p>Anselm Hook, who I interviewed in depth recently about, <a title="Permanent Link to Visual Search,  Augmented  Reality and a Social Commons for the Physical World Platform:  Interview  with Anselm Hook" rel="bookmark" href="http://docs.google.com/2010/01/17/visual-search-augmented-reality-and-a-social-commons-for-the-physical-world-platform-interview-with-anselm-hook/">Visual Search, Augmented Reality and a Social Commons for the Physical World Platform: Interview with Anselm Hook</a>, has some very interesting thoughts on real time stuff, trading brokerages, and the view within a single city block, which he elaborated on in the second half of this interview which is upcoming on Ugotrade soon!</p>
<h3><strong>The  ARWave Developers</strong></h3>
<p><strong> </strong>There are three people who unfortunately can&#8217;t join us at Where 2.0 &#8211; the costs of travelling from Europe being an obstacle. But as they have been developing the code for ARWave that will rock our augmented world, I asked them, in a Wave conversation, to give me a few comments about their interest in working on ARWave, and a pic and a short bio. Also I should mention the work of the PyGoWave team whose incredibly fast work creating <a id="stt3" title="PyGoWave" href="http://pygowave.net/">PyGoWave</a> has given ARWave a rocket launch pad. Also many thanks to the Wave community, see the <a id="vma_" title="Wave Federation  Protocol documentation" href="http://www.waveprotocol.org/">Wave Federation Protocol documentation</a>, <a id="exsg" title="Google's Wave  Server" href="https://wave.google.com/wave">Google&#8217;s Wave Server</a>, <a id="b:s7" title="RubyOnSails" href="http://wiki.github.com/danopia/ruby-on-sails/">RubyOnSails</a> (Ruby On Rails based Wave server).</p>
<p><a href="http://need2revolt.wordpress.com/" target="_blank"><strong>Davide   Carnovale</strong></a> @need2revolt</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/davide.jpg"><img class="alignnone size-thumbnail wp-image-5349" title="davide" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/davide-150x150.jpg" alt="davide" width="150" height="150" /></a></p>
<p><strong>&#8220;Imho, the coolest geolocated related thing is that we&#8217;re making a world where the info does not necessarily come from an explicit search from the user, but comes also from the actual location you&#8217;re in. For instance, you can have special offers in stores like foursquare does, or your friends can leave geolocated notes for you that are triggered when you walk by. We can have games based on the treasure hunt schema requiring you to actually go to a specific location.</strong></p>
<p><strong>Other than this I  can think about self-guided tours of the city, maybe user generated  too, or for museums.<br />
</strong></p>
<p><strong>Naturally these are long term  goals with some rl use cases.</strong></p>
<p><strong>As for my bio, there isn&#8217;t much to say&#8230; I got a first level degree in computer science and I&#8217;m taking the second (and last) level. I&#8217;ve developed with mobile agents, osgart/artoolkit, brain computer interfaces, linux kernel and that&#8217;s pretty much all&#8230;&#8221;</strong></p>
<p><strong><br />
</strong></p>
<p><strong><a href="http://www.lostagain.nl/" target="_blank">Thomas Wrobel</a></strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-28-at-4.35.59-AM.png"><img class="alignnone size-thumbnail wp-image-5354" title="Screen shot 2010-03-28 at 4.35.59 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/Screen-shot-2010-03-28-at-4.35.59-AM-150x150.png" alt="Screen shot 2010-03-28 at 4.35.59 AM" width="150" height="150" /></a></p>
<p><strong>&#8220;If you are looking for specific advantages of using Wave I&#8217;d say;<br />
</strong><strong> </strong></p>
<p><strong>* Federated &#8211; Letting creators tap into bigger userbase. Each new app or data layer will add to the &#8220;incentive&#8221; for users to join in. Google had some good stats a few months back as to how much a simple login screen can put people off using stuff. By breaking that barrier it should make AR userbases grow.</strong></p>
<p><strong>* It deals with user accounts,  permissions, and real-time updating without creators needing to make a  new server standard themselves. It lowers barriers to development.</strong></p>
<p><strong>*  As the clients, servers, and data can be made separately by different  parties, its easier for developers to concentrate on just providing what  they want. You want to just make content? No problem! You dont need to  worry about doing anything else but that. It would become as easy as  making a webpage (or easier!).</strong></p>
<p><strong>* Bots will allow the  development of interactive AR games very easily. Just like modern  version of IRC bots, the infrastructure does the heavy lifting, and  interesting things can be done with just simple scripting.</strong></p>
<p><strong>* The idea is anyone will be able to make a layer onto the world, and people can mix, match and share their layers as they wish. It&#8217;s not just the data that becomes interesting to see augmenting our world, but the combinations of data! For example, perhaps you could see the profits generated by different companies above their buildings, but also see how environmentally friendly they are at the same time. Or maybe see pollution levels against health-statistics. Seeing combinations of geolocated data from different sources at the same time has many interesting possibilities both for scientific as well as casual (game/ map/ chat) use.</strong></p>
<p><strong>hmz&#8230; I could go on forever listing stuff here really&#8230;</strong></p>
<p><strong>I guess if we are supposed  to be forming a roadmap of significant/interesting things for ARWave?</strong></p>
<p><strong>*  Example clients letting people make their own layers (waves) and add  points to them.</strong></p>
<p><strong>* Letting people log in to different  servers</strong></p>
<p><strong>* Servers federated together. (not our  responsibility, but essential part of the roadmap).</strong></p>
<p><strong>*  Anyone logged into any server can see data from anyone else that&#8217;s shared  with them, regardless of where they are logged into</strong></p>
<p><strong> * 3D  support, demonstrating various sorts of geolocated data.?</strong></p>
<p><strong>* Use of bots for example games?<br />
&#8212;-<br />
My Bio&#8217;s quite simple.<br />
Studied 3D Animation in Portsmouth, UK.<br />
Moved to the Netherlands, have since been working in creating ARG games, in the last year founded Lostagain (Lostagain.nl).&#8221;</strong></p>
<p><strong><br />
</strong></p>
<p><strong><a id="ikdu" title="Markus Strickler" href="http://twitter.com/kusako">Markus  Strickler @kusako</a></strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/markus.jpg"><img class="alignnone size-thumbnail wp-image-5350" title="markus" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/03/markus-150x150.jpg" alt="markus" width="150" height="150" /></a></p>
<p><strong>&#8220;I think the main point behind ARWave is to go beyond simply displaying existing placemarks on top of a live camera view, towards a highly personalized, augmented world where everybody can edit and share localized information collaboratively and in real time. Wave provides the means to do this through its model of persistent real time conversations and adds even more by providing a way for personal agents (robots) to participate in these conversations.</strong></p>
<p><strong>As for my Bio: I&#8217;ve been developing Web applications for the last 15 years, hold a degree in Image Sciences and am currently working as a Java developer in Cologne, Germany.&#8221;</strong></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2010/03/29/the-next-wave-of-ar-exploring-social-augmented-experiences-at-where-2-0/feed/</wfw:commentRss>
		<slash:comments>3</slash:comments>
		</item>
		<item>
		<title>The Game is about the World not Dragons: Talking with Will Wright about Augmented Reality</title>
		<link>http://www.ugotrade.com/2010/03/03/the-game-is-about-the-world-not-dragons-talking-with-will-wright/</link>
		<comments>http://www.ugotrade.com/2010/03/03/the-game-is-about-the-world-not-dragons-talking-with-will-wright/#comments</comments>
		<pubDate>Thu, 04 Mar 2010 03:29:23 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial general Intelligence]]></category>
		<category><![CDATA[Artificial Life]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[virtual communities]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[3D Mapping]]></category>
		<category><![CDATA[alternate reality games]]></category>
		<category><![CDATA[are2010]]></category>
		<category><![CDATA[augmented reality event]]></category>
		<category><![CDATA[Blaise Aguera y Arcas]]></category>
		<category><![CDATA[crowd sourced intelligence]]></category>
		<category><![CDATA[DARPA AI]]></category>
		<category><![CDATA[Engage]]></category>
		<category><![CDATA[FourSquare]]></category>
		<category><![CDATA[Games for Learning]]></category>
		<category><![CDATA[Games for Learning Institute]]></category>
		<category><![CDATA[high dynamic lighting photographs]]></category>
		<category><![CDATA[hyper-local experiences]]></category>
		<category><![CDATA[hyper-local search]]></category>
		<category><![CDATA[immersive games]]></category>
		<category><![CDATA[open augmented reality]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[proximity based social networks]]></category>
		<category><![CDATA[siri]]></category>
		<category><![CDATA[smart things]]></category>
		<category><![CDATA[Stupid Fun Club]]></category>
		<category><![CDATA[The Sims]]></category>
		<category><![CDATA[The Sims2]]></category>
		<category><![CDATA[Wii]]></category>
		<category><![CDATA[Will Wright]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=5171</guid>
		<description><![CDATA[&#8220;The game is about the world not dragons,&#8221; Will Wright, Founder and Chief ExecutiveÂ  Stupid Fun Club, Creator of Spore and The Sims. I had a brief chat with Will Wright after his talk at Engage!, and I was delighted to hear that augmented reality is high on his agenda at the moment: &#8220;a lot [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><strong><a href="http://www.stupidfunclub.com" target="_blank"><img class="alignnone size-medium wp-image-5200" title="Screen shot 2010-02-22 at 12.26.12 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/Screen-shot-2010-02-22-at-12.26.12-PM-300x289.png" alt="Screen shot 2010-02-22 at 12.26.12 PM" width="300" height="289" /></a><br />
</strong></p>
<p><strong>&#8220;The game is about the world not dragons,&#8221; Will Wright, Founder and Chief ExecutiveÂ  <a href="http://www.stupidfunclub.com" target="_blank">Stupid Fun Club, </a>Creator of <a href="http://www.spore.com/" target="_blank">Spore</a> and <a href="http://thesims2.ea.com/" target="_blank">The Sims.</a><br />
</strong></p>
<p>I had a brief chat with <a href="http://en.wikipedia.org/wiki/Will_Wright_%28game_designer%29" target="_blank">Will Wright</a> after his talk at <a href="http://www.engageexpo.com/ny2010/" target="_blank">Engage!</a>, and I was delighted to hear that augmented reality is high on his agenda at the moment:</p>
<p><strong>&#8220;a lot of our stuff is kind of in the experimental format right now, but definitely one of our strong interests is AR.&#8221; </strong></p>
<p>Will Wright will be coming to speak at <a href="http://augmentedrealityevent.com/speakers/" target="_blank">Augmented Reality Event</a>, Santa Clara, CA., June 2nd, 3rd, 2010. But, for now, here are a few hints at some of the directions that are intriguing him, e.g., the game potential of 3D mapping like <a href="http://www.ted.com/talks/blaise_aguera.html" target="_blank">Blaise Aguera y Arcas&#8217;s demo of augmented reality maps at TED</a> &#8211; see the full conversation below.</p>
<p>There has been a vital shift, Will Wright points out. Before the Wii, immersive was understood as how much we were pulled into the world of the game. Now immersive is how much the game pulls us deeper into our world, e.g., our relationship with the people we are playing with as in Rock Band, or engaging with other people&#8217;s crazy antics when playing Wii games.</p>
<h3><strong>&#8220;Computers are imagination amplifiers and toys are imagination constructors.&#8221;</strong></h3>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/computerareimaginatinamplifiers.jpg"><img class="alignnone size-medium wp-image-5183" title="computerareimaginatinamplifiers" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/computerareimaginatinamplifiers-300x290.jpg" alt="computerareimaginatinamplifiers" width="300" height="290" /></a><br />
</strong></p>
<p><em>The slide above is from Will Wright&#8217;s talk at <a href="http://www.engageexpo.com/ny2010/" target="_blank">Engage!</a> </em></p>
<p>Will Wright&#8217;s talk was extraordinary, dense, layered, and deeply thought provoking.<strong><br />
</strong></p>
<p>I have picked out a few samples from Will Wright&#8217;s vast tome of slides here. They are just a glimpse of the many insights he offered. If you are still wondering what will transform augmented reality into a mainstream experience, I suggest studying this talk carefully (I think the audio will be posted on the <a href="http://www.engageexpo.com/ny2010/" target="_blank">Engage! web site</a>). Also watch Will Wright&#8217;s <a href="http://g4li.org/" target="_blank">Games For Learning Institute </a>talk at NYU, February 17th, 2010, <a href="http://g4li.org/archives/1986" target="_blank">archived here</a>.</p>
<p>Will Wright and <a href="http://www.stupidfunclub.com/home.html">Stupid Fun Club</a> are getting ready to takes us to the next level of imagination amplification and construction.</p>
<h3><strong>&#8220;Smart&#8221; things can make us dumber by overriding our instincts<br />
</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/replacingourinstincts.jpg"><img class="alignnone size-medium wp-image-5182" title="replacingourinstincts" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/replacingourinstincts-300x199.jpg" alt="replacingourinstincts" width="300" height="199" /></a></p>
<p>Just one of the many wonderful anecdotes Will Wright toldÂ  was the story of his experiences with a new &#8220;smart&#8221; car (he bought this car with the intent of exploring the pinnacle of the &#8220;smart&#8221; car experience). <em>The slide above is from Will Wright&#8217;s talk at <a href="http://www.engageexpo.com/ny2010/" target="_blank">Engage!</a></em></p>
<p>Increasingly, artifacts are being designed to send us more and more data, and this car was endowed with an array of sensors supplying data aimed at assisting parallel parking &#8211; a notoriously challenging aspect of driving. But the car failed miserably in helping. While parallel parking had been easy for him prior to being deluged with all this data, Will Wright pointed out, ironically, he had to learn to ignore this stuff to park the &#8220;smart&#8221; car.</p>
<p>Instinctively, we learn to filter the information necessary for parking to the relevant stuff.Â  This kind of pre-conscious filtering is a key challenge for augmented reality, and one that Will Wright, as a game designer, has given great deal of thought to.</p>
<p>As Will Wright pointed out, aÂ  lot of our ideas about augmented reality, and sensor enabled artifacts, are rooted in trying to give us more data, to &#8220;take over our instincts.&#8221; Â  Not only do these artifacts attempt to give us more data, which as in the case of the HUDs for parallel parking can get in the way of our own highly effective intuitive instincts.Â  But, as Will Wright also noted, these artifacts also have more data which they can deploy independently to override our instincts, e.g., the car detecting your head has turned back to talk to a passenger and applying the brakes!</p>
<p><strong><br />
</strong></p>
<h3><strong>&#8220;Toys Encourage Agency&#8221;</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/Screen-shot-2010-02-19-at-3.14.53-AM.png"><img class="alignnone size-medium wp-image-5188" title="Screen shot 2010-02-19 at 3.14.53 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/Screen-shot-2010-02-19-at-3.14.53-AM-300x200.png" alt="Screen shot 2010-02-19 at 3.14.53 AM" width="300" height="200" /></a></p>
<p>Toys can be the antidote to instinct blocking &#8220;smart things.&#8221;Â  In contrast to &#8220;smart&#8221; data spitting cars that &#8220;take over&#8221; our instincts, toys encourage agency.Â  Will Wright gave the example ofÂ  high dynamic lighting photographs that make the world &#8220;toy like&#8221; and encourage us want to reach in and play with it (<a href="http://hdrcreme.com/photos/36-Sunset" target="_blank"><em>photo above from HDRCreme</em></a>).</p>
<h3>&#8220;What Computers are really good at is harvesting human intelligence&#8221;</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/HiveMind1.jpg"><img class="alignnone size-medium wp-image-5194" title="HiveMind" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/HiveMind1-300x199.jpg" alt="HiveMind" width="300" height="199" /></a></p>
<p>Another key insight that Will Wright explored in depth in his talk was the significance ofÂ  crowd sourced intelligence (<em>the slide above is from Will Wright&#8217;s talk at <a href="http://www.engageexpo.com/ny2010/" target="_blank">Engage!</a>)</em>.Â  If the crowd is training the filter, he suggested to me, this might build the kind of context we need to build meaningful augmented reality experiences (for more on this see the conversation below).</p>
<h3>Talking with Will Wright at Engage!, NYC, 2010</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/WillWright2.jpg"><img class="alignnone size-medium wp-image-5174" title="WillWright2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/02/WillWright2-277x300.jpg" alt="WillWright2" width="277" height="300" /></a></p>
<p><strong>Tish Shute:</strong> I was very interested by the idea you put out that this deluge of information gathered by sensors is not necessarily a kind of nirvana for augmented reality, in fact it can be just the opposite. In the embryonic world of augmented reality, we have two streams it seems at the moment &#8211; one is the idea of a kind of like hyper local nirvana imagined for AR, in which we get information relevant to us, when and where we need it. But you talked about some of the problems in realizing this, didn&#8217;t you? The other strand is the emerging stream of play which you are exploring&#8230;</p>
<p><strong>Will Wright:</strong> Right. I think part of it is like what I was talking about &#8211; the way our senses are set up to know how to filter out 99% of what is coming into them. That is why they work, and that is what is beneficial. I think that is why AR needs to focus on&#8230;</p>
<p>You look at what I can find out on Google or whatever, the amount of information is just astronomical.Â  The hard part, the intelligent part, is how do you figure out that one tenth of 1% that I actually care about at this given second?</p>
<p><strong>Tish Shute: </strong> Yes.Â  Have you seen any examples of AR beginning to do that?</p>
<p><strong>Will Wright: </strong> No, not at all. I think that you have to have a contextual understanding of where I am at, where my mindset is, what my situation is, what my goal state is in a moment by moment basis. And then it is still a complex task. But the very first thing we need is more context for building a filter. See, that filter is changing every few minutes, you know, what I am filtering into my senses is changing, and my context is changing moment to moment.</p>
<p><strong><br />
Tish Shute: </strong> I really liked your emphasis on crowd sourced intelligence as the key power of a networked world, is this the seed..?</p>
<p><strong>Will Wright:</strong> Well, you can imagine crowd sourcing that filter&#8230;it would affect a million people and get a sense of what mental context that they were in and what filter they turned on. And so, in a sense, the crowd is training the filter.</p>
<p><strong>Tish Shute:</strong> Yes. The problem with projects like <a href="http://siri.com/" target="_blank">SIRI</a>, that is driven by the big DARPA AI project, CALO, is it is centralized &#8211; although I am not sure what they intend to do in terms of crowd source corrections? But if it was all open and we could crowd source as well that would be interesting. But in the end we need a framework for AR that is as open as the internet, don&#8217;t we?</p>
<p><strong><br />
Will Wright:</strong> Right.Â  I think the technological infrastructure needs to be much lighter so that it can be grounded in more like a Twitter feed or something.</p>
<p><strong>Tish Shute:</strong> Yeah. I&#8217;m actually working on a project using the Wave Federation protocol as the basis for a<a href="http://arwave.wiki.zoho.com/HomePage.html" target="_blank">n open communications framework for augmented reality, AR Wave</a> &#8211; not the Wave user interface, just the real-time federation protocol. But, of course, for it to become an open framework that could be a vehicle for crowd trained augmented reality it would need good take-up!</p>
<p><strong>Will Wright: </strong> Right.Â  You really want a million people involved.</p>
<p><strong>Tish Shute:</strong> Yes our dream is that the creation of augmented reality content will be as open, accessible andÂ  simple as making an html page, or contributing to a wiki.</p>
<p>So in terms of AR games what is interesting on the horizon, presumably games also have to solve the problems of delivering a hyper local experience. The car that you described in your talk tried hard to use augmented reality to solve the problem of parallel parking and ended up making it harder. So giving us the information we need, where we need it, when we need it, and specific to who we are is going to be a big challenge. But I mean in terms of games, what kinds of hyper local experiences will be most fun and what have you seen that is interesting in terms of augmented reality games up to now?</p>
<p><strong>Will Wright: </strong> I&#8217;ve not actually seen much at all. I&#8217;ve seen people doing interesting stuff with like Google Maps. They aren&#8217;t really entertainment oriented, but I think you can start thinking about&#8230;</p>
<p>I mean I think for a lot of people, Google Street View is entertainment.Â  But I havenâ€™t really seen something that was really leaning into an entertainment application using existing technology and data that is already out there.</p>
<p>I mean I have seen some cool experiments-people playing Pac-Man in Washington Square and stuff like that, but nothing really serious.</p>
<p><strong><br />
Tish Shute: </strong>Yeah. Of course I think one of the missing links is that the barrier of entry is way too high for creating social augmented experiences for smart phones, and as you point out in your talk it is the social implications of the game that make it compelling.</p>
<p><strong>Will Wright: </strong> Also, I think using them [smart phones] as data aggregation devices rather than just data consumption devices&#8230;so that people out there are using their phone, cameras, microphones, or whatever to gather data and get an experience where they are rewarded for gathering data.</p>
<p><strong>Tish Shute: </strong> Like <a href="http://foursquare.com/" target="_blank">foursquare</a> where you get the badges, and people can become the mayor of like a cafe or something.</p>
<p><strong>Will Wright:</strong> Right. Yeah, you can imagine people using their phones to actually kind of pull information&#8230;</p>
<p><strong>Tish Shute: </strong> A Dutch developer/artist/game designer, Thomas Wrobel, <a href="http://www.lostagain.nl/" target="_blank">Lost Again</a>, came up with the original concept for the AR framework we are building on the Wave Federation protocol. Thomas and his partner Bertine van Hovell design alternate reality games, amongst other things they do&#8230;so they are deeply immersed in the potential of the world as game.</p>
<p><strong>Will Wright:</strong> Yeah, one of my programmers actually works in Amsterdam&#8230;there is a whole sub-community&#8230;<br />
Well, yeah. The possibilities are tremendous. And Wii is actually training us that way [to be as much engaged with the other players in the physical space as the virtual game], so it is going to happen.</p>
<p><strong>Tish Shute: </strong> What are the most exciting things you see at the moment, and for the next 12 months for augmented reality?</p>
<p><strong>Will Wright:</strong> Gosh.Â  I mean I just think there is cool stuff happening in mapping, in general.<strong><br />
</strong></p>
<p><strong>Tish Shute:</strong> Like <a href="http://www.ted.com/talks/blaise_aguera.html" target="_blank">Blaise Aguera y Arcas&#8217;sÂ  demo of augmented reality maps at TED?</a></p>
<p><strong>Will Wright: </strong> Yeah, I thought the 3-D mapping with Microsoft&#8230;I think like the next level of that is going to be really compelling.</p>
<p><strong>Tish Shute:</strong> You see game potentials in that?</p>
<p><strong>Will Wright: </strong> Yeah.Â  You start overlaying really cool game potential on top of that.</p>
<p><strong>Tish Shute:</strong> Might you get interested and do something?</p>
<p><strong>Will Wright:</strong> Oh, yeah.Â  I mean in terms of games, that is one of my biggest interests, is AR.</p>
<p><strong>Tish Shute: </strong>Are you allowed to talk about anything specific at all?</p>
<p><strong>Will Wright:</strong> Not yet, no.Â  I mean a lot of our stuff is kind of in the experimental format right now, but definitely one of our strong interests is AR.</p>
<p><strong>Tish Shute: </strong> Yeah, absolutely. We are over being tied to our desks to use computers &#8211; we want to be doing it anywhere, anytime, with anything&#8230;</p>
<p><strong>Will Wright: </strong> Now the game is about the world instead of about dragons.Â  I love that.</p>
<p><em><a href="http://www.stupidfunclub.com/home.html"></a></em></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2010/03/03/the-game-is-about-the-world-not-dragons-talking-with-will-wright/feed/</wfw:commentRss>
		<slash:comments>11</slash:comments>
		</item>
		<item>
		<title>Augmented Reality DevCamp NYC: The Big ARNY &#8211; A Collaborative AR Game Project Modeled After Swarm of Angels</title>
		<link>http://www.ugotrade.com/2009/12/06/augmented-reality-devcamp-nyc-the-big-arny-a-collaborative-ar-game-project-modeled-after-swarm-of-angels/</link>
		<comments>http://www.ugotrade.com/2009/12/06/augmented-reality-devcamp-nyc-the-big-arny-a-collaborative-ar-game-project-modeled-after-swarm-of-angels/#comments</comments>
		<pubDate>Sun, 06 Dec 2009 13:20:50 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[AR DevCamp]]></category>
		<category><![CDATA[AR DevCampNYC]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[ardevcamp]]></category>
		<category><![CDATA[ARDevCampNYC]]></category>
		<category><![CDATA[aygmented reality]]></category>
		<category><![CDATA[Goblin XNA]]></category>
		<category><![CDATA[Google Wave Protocol for AR]]></category>
		<category><![CDATA[marker based augmented reality]]></category>
		<category><![CDATA[markerless augmented reality]]></category>
		<category><![CDATA[Microvision]]></category>
		<category><![CDATA[mobile social augmented reality]]></category>
		<category><![CDATA[mobile social games]]></category>
		<category><![CDATA[open augmented reality]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[semantic web and augmented reality]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented reality]]></category>
		<category><![CDATA[The Big ARNY]]></category>
		<category><![CDATA[The Big ARNY Game]]></category>
		<category><![CDATA[The Open Planning Project]]></category>
		<category><![CDATA[TOPP]]></category>
		<category><![CDATA[TOPPLABS]]></category>
		<category><![CDATA[Wave enabled AR]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4996</guid>
		<description><![CDATA[First an incredibly big thank you to The Open Planning Project office (TOPP) &#8211; @TOPPLabs, and Sophia Parafina, @spara, for organizing, hosting, sponsoring and providing so much inspiration for this event. There is an incomplete list of attendees below, and there were about 70 people at one point watching the Ustream (thank you Dimitri Darras [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="400" height="300" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="flashvars" value="offsite=true&amp;lang=en-us&amp;page_show_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2Fshow%2F&amp;page_show_back_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2F&amp;set_id=72157622945515856&amp;jump_to=" /><param name="allowFullScreen" value="true" /><param name="src" value="http://www.flickr.com/apps/slideshow/show.swf?v=71649" /><param name="allowfullscreen" value="true" /><embed type="application/x-shockwave-flash" width="400" height="300" src="http://www.flickr.com/apps/slideshow/show.swf?v=71649" allowfullscreen="true" flashvars="offsite=true&amp;lang=en-us&amp;page_show_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2Fshow%2F&amp;page_show_back_url=%2Fphotos%2Fugotrade%2Fsets%2F72157622945515856%2F&amp;set_id=72157622945515856&amp;jump_to="></embed></object></p>
<p>First an incredibly big thank you to <a title="http://openplans.org/contact/" rel="nofollow" href="http://openplans.org/contact/">The Open Planning Project office (TOPP)</a> &#8211; <a href="http://twitter.com/TOPPLabs" target="_blank">@TOPPLabs,</a> and Sophia Parafina, <a href="http://twitter.com/spara" target="_blank">@spara</a>, for organizing, hosting, sponsoring and providing so much inspiration for this event.</p>
<p>There is an incomplete list of attendees below, and there were about 70 people at one point watching the Ustream (thank you <a href="../../tridarras.com/#http://www.dimitridarras.com/images/dd_work.jpg" target="_blank">Dimitri Darras</a> and friend &#8211; sorry I missed getting your card!) for setting this up.</p>
<p>There were at least ten or more people participating in a live skype conference moderated by Sophia with great skill.</p>
<p>I am sorry I didn&#8217;t get everyone&#8217;s contact info. But please feel free to add your name into the comments of this post if I have missed you out.</p>
<p>After a gearheady morning, we spent the afternoon and evening brain storming the &#8220;The Big ARNY&#8221; &#8211; &#8220;a collaborative game development project modeled after a <a href="http://aswarmofangels.com/" target="_blank">Swarm of Angels</a>.&#8221;</p>
<p>Some of the morning tech discussion highlights included:</p>
<p>*skype presentations on <a href="http://arwave.wiki.zoho.com/HomePage.html" target="_blank">AR Wave</a> from <a href="http://www.lostagain.nl/" target="_blank">Thomas Wrobel</a>, <a href="http://www.joelamantia.com/" target="_blank">Joe Lamantia, </a><a href="http://matthieupierce.com/" target="_blank">Matthieu Pierce</a>.</p>
<p>*the <a href="http://www.youtube.com/watch?v=h4HmYQPejFk">beginnings of an iphone client</a> from the <a href="http://code.google.com/p/pygowave-server/" target="_blank">PyGoWave</a> Crew.</p>
<p>*discussing <a href="http://www.microvision.com/wearable_displays/index.html" target="_blank">Microvision</a>, Augmented Reality eyewear &#8211; and trying out<a href="http://twitpic.com/s9zjt"> Nomad Unit</a> courtesy of <a href="http://augmentation.wordpress.com/" target="_blank">Noah Zerkin</a>, @NoaZark</p>
<p>*an awesome deep dive into the code of the <a href="http://www1.cs.columbia.edu/~ohan/" target="_blank">open Goblin XNA VR/AR platform</a> &#8211; courtesy of <a href="http://www.cs.columbia.edu/~ohan/" target="_blank">Ohan Oda</a> (pic below) <a href="http://www.ustream.tv/recorded/2719336" target="_blank">video of presentation here</a>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ohanodapost.jpg"><img class="alignnone size-medium wp-image-5016" title="ohanodapost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ohanodapost-300x199.jpg" alt="ohanodapost" width="300" height="199" /></a></p>
<p>Ori Inbar<a href="http://gamesalfresco.com/2009/12/05/live-from-nyc-augmented-reality-dev-camp/" target="_blank"> live blogged the morning sessions on Games Alfresco. </a></p>
<p>But, during the afternoon, Ori presented, and we all got so caught up in the brainstorming of &#8220;The Big ARNY Game&#8221; that live blogging, skyping, and twittering ground to a near halt. The &#8220;meat space&#8221; (perhaps the slide show captures some of the incredible coolness of the location) was alive with brilliant ideas that were matched by an incredibly high level of technical input &#8211; see the AR DevCamp attendees list below.</p>
<p>During the game session we really had a master class in augmented reality tech. <a href="http://www1.cs.columbia.edu/~feiner/" target="_blank">Steven Feiner&#8217;s</a> awesome discussion of markers really opened my mind to exploring markers in a new way. And the geolocated data discussion with Sophia Parafina, <a href="http://www.maploser.com/?page_id=6" target="_blank">Kate Chapman,</a> <a href="http://phil.ashlock.us/" target="_blank">Philip Ashlock</a>, and Steve Feiner at dinner was very interesting. The opportunity to break out into smaller in depth discussions during the day was one of the valuable opportunities of AR DevCamp, so I can&#8217;t possibly mention them all. But thank you everybody!</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/BigARNYpost.jpg"><img class="alignnone size-medium wp-image-5005" title="BigARNYpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/BigARNYpost-300x199.jpg" alt="BigARNYpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ardevcampnycpost.jpg"><img class="alignnone size-medium wp-image-5013" title="ardevcampnycpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/ardevcampnycpost-300x199.jpg" alt="ardevcampnycpost" width="300" height="199" /></a></p>
<p>We did have some fun with low tech AR too &#8211; courtesy of Thomas Wrobel &amp; Bertine van H&#246;vell, <strong><a href="http://www.lostagain.nl/" target="_blank">Lost Again</a></strong> (their business card is the coolest AR card I have seen to date). In the pic on the right, I try out their business card/AR overlay on @comogard as he presents. The lighting does not do the overlay justice in my photo (on right), but I think you get the idea at least.</p>
<p>Unfortunately we didn&#8217;t manage to hook up our afternoon live session with <a href="http://www.ardevcamp.org/wiki/index.php?title=Main_Page" target="_blank">The Mountain View AR DevCamp</a>, as we lost the streaming laptop. But hopefully we will be able to catch up on each other&#8217;s activities with session notes on the<a href="http://www.ardevcamp.org/wiki/index.php?title=Main_Page" target="_blank"> AR DevCamp Wiki.</a> There is also a public wave, <a href="https://wave.google.com/wave/#restored:wave:googlewave.com!w%252BTfPQziYJA" target="_blank">AR Dev Camp NYC Shared Notes</a>.</p>
<p><a href="http://www1.cs.columbia.edu/~swhite/" target="_blank">Sean White</a> set the afternoon off to a great start by collecting and organizing topics of interest on the board. While we didn&#8217;t get time to cover everything, it was interesting how, by working on developing a collaborative game project, we had to tackle many of the topics suggested, and come up with workable approaches.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Seanwhitenotespost.jpg"><img class="alignnone size-medium wp-image-5006" title="Seanwhitenotespost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Seanwhitenotespost-300x199.jpg" alt="Seanwhitenotespost" width="300" height="199" /></a></p>
<p>Next weekend <a title="http://openplans.org/contact/" rel="nofollow" href="http://openplans.org/contact/">TOPP</a> <span><span> will host the <a href="http://opennyforum.org/2009/11/open-ny-summit-09/" target="_blank">OpenNY Summit &amp; Codeathon</a> on Dec 11 &amp; 12, an event &#8220;</span></span><span>produced by open government practitioners and volunteers.&#8221;Â  This would be another great place to explore some of the citizen 2.0 mobile, social AR game ideas that came up at AR DevCampNYC.  In addition, Ori Inbar has started an <a href="http://www.meetup.com/ARNY-Augmented-Reality-New-York/">AR New York Meetup</a>.<br />
</span></span></p>
<p>Below is an incomplete list of AR DevCampNYC attendees.</p>
<p><strong>Sophia Parafina</strong>, OpenGeo, @spara, organizer<strong> </strong></p>
<p><strong>Marco Neumann</strong>, <a href="http://www.konallc.com/" target="_blank">KONA</a>, @neumarcx, interested in developing Semantic Web based Augmented Reality demos.<strong> </strong></p>
<p><strong>Tish Shute</strong>, <a title="http://www.ugotrade.com" rel="nofollow" href="../../">Web</a>,<a title="http://twitter.com/tishshute" rel="nofollow" href="http://twitter.com/tishshute">@tishshute</a>, Open Distributed AR, Google Wave Protocol for AR, Imagining the Future of the Outernet<strong> </strong></p>
<p><strong>Dimitri Darras</strong>, @dimitridarras, Visual designer, web developer, and virtual worlds content creator. Interested in multimodal input and AR/Virtual Worlds integration.<strong></strong></p>
<p><strong>Heidi Hysell</strong>, @heidihysell, Creative Technologist &amp; Software Engineer; Interested in the application of AR for entertainment technology for print, web and video.<strong></strong></p>
<p><strong>Joe Lamantia</strong>, Muku / ARWave, <a title="http://joelamantia.com" rel="nofollow" href="http://joelamantia.com/">@mojoe</a> Amsterdam, interested in creating open frameworks, social augmented experiences, emerging media &#8211; (attending via skype)<strong></strong></p>
<p><strong>Kate Chapman</strong>, Web Developer, FortiusOne, @wonderchook<strong></strong></p>
<p><strong>Matthieu Pierce</strong>, <a title="http://matthieupierce.com" rel="nofollow" href="http://matthieupierce.com/">itinerant poet</a>, @matthieupierce, Pittsburgh, PA.  Interested in <a title="AR Use Cases" href="http://www.ardevcamp.org/wiki/index.php?title=AR_Use_Cases">AR Use Cases</a> and observation. Attending via Skype.</p>
<p><strong>Ori Inbar</strong>, <a title="http://ogmento.com" rel="nofollow" href="http://ogmento.com/">ogmento</a> <a title="http://gamesalfresco.com" rel="nofollow" href="http://gamesalfresco.com/">games alfresco</a> Let&#8217;s get together to brainstorm on the &#8220;Big AR NY Game&#8221;: The first location-based, social, augmented reality game designed for New York by New Yorkers.<strong></strong></p>
<p><strong>Noah Zerkin</strong>, <a title="http://augmentation.wordpress.com" rel="nofollow" href="http://augmentation.wordpress.com/">[1]</a> &#8211; AR software and hardware interfaces; Exploring the idea of an AROS.<strong></strong></p>
<p><strong>Ohan Oda</strong>, <a title="http://www.cs.columbia.edu/~ohan" rel="nofollow" href="http://www.cs.columbia.edu/%7Eohan">webpage</a> &#8211; Columbia University; NYC<strong></strong></p>
<p><strong>Sean White</strong>, <a title="http://www.cs.columbia.edu/~swhite" rel="nofollow" href="http://www.cs.columbia.edu/%7Eswhite">webpage</a> &#8211; Augmented reality research at Columbia University and Smithsonian Institution.<strong></strong></p>
<p><strong>Steve Henderson</strong>, Columbia University, <a title="http://www.cs.columbia.edu/~henderso" rel="nofollow" href="http://www.cs.columbia.edu/%7Ehenderso">webpage</a>, <a title="http://twitter.com/stevehenderson" rel="nofollow" href="http://twitter.com/stevehenderson">@stevehenderson</a><strong></strong></p>
<p><strong>Omer Gunes</strong>, [<a title="http://www.cs.nyu.edu/~ofg201" rel="nofollow" href="http://www.cs.nyu.edu/%7Eofg201">[2]</a> webpage] &#8211; NLP, Speech Recognition, Mobile Software Development<strong></strong></p>
<p><strong>Steve Feiner</strong>, Computer Graphics and User Interfaces Lab, Dept. of Computer Science, Columbia University, <a title="http://www.cs.columbia.edu/~feiner" rel="nofollow" href="http://www.cs.columbia.edu/%7Efeiner">personal</a>, <a title="http://www.cs.columbia.edu/graphics/top.html" rel="nofollow" href="http://www.cs.columbia.edu/graphics/top.html">lab</a> &#8211; Augmented reality, mobile/wearable computing.<strong></strong></p>
<p><strong>Jon Russek</strong>, NYC, <a title="http://www.russek.org" rel="nofollow" href="http://www.russek.org/">website</a>, <a title="http://twitter.com/filmaddict" rel="nofollow" href="http://twitter.com/filmaddict">@filmaddict</a> &#8211; AR as applied to film/theater/art.<strong></strong></p>
<p><strong>Daniel Leslie</strong>, <a title="http://reflexionsdata.com" rel="nofollow" href="http://reflexionsdata.com/">Reflexions Data, LLC</a> <a title="http://twitter.com/dan_leslie" rel="nofollow" href="http://twitter.com/dan_leslie">@dan_leslie</a>, Principal at application consulting/development firm where we&#8217;re working on a mobile app for proximity-based real time social graph analysis.<strong></strong></p>
<p><strong>Donald Schwartz</strong>, NYC, <a title="http://twitter.com/Ishkahbibel" rel="nofollow" href="http://twitter.com/Ishkahbibel">@Ishkahbibel</a>virtual worlds, social media, technology writer</p>
<p><strong>David Oliver</strong>, <a title="http://olivercoady.com" rel="nofollow" href="http://olivercoady.com/">Oliver+Coady, Inc. NYC</a>, <a title="http://twitter.com/davidmoliver" rel="nofollow" href="http://twitter.com/davidmoliver">@davidmoliver</a> mobile strategy, mobile product definition, mobile development.</p>
<p><strong>Chris Grayson</strong>, NYC, Twitter: <a title="http://twitter.com/chrisgrayson" rel="nofollow" href="http://twitter.com/chrisgrayson">@chrisgrayson</a> | Blog: <a title="http://gigantico.squarespace.com" rel="nofollow" href="http://gigantico.squarespace.com/">GigantiCo</a> | Contributor: <a title="http://hplusmagazine.com" rel="nofollow" href="http://hplusmagazine.com/">H+ Magazine</a> | Web developer and marketing consultant &#8212; Interests: Future of commercial mobile AR / Outernet (GeoSearch &amp; OOH marketing convergence); Future AR Form Factors; AR/Virtual Worlds integration re: distance learning &amp; collaboration.</p>
<p><strong>Saul Devitt</strong>, NYC<strong></strong></p>
<p><strong>Bert Picot</strong>, NYC via Skype probably around 10:30 am for a few hours. Very interested in learning the value chain for AR applications and the development of applications for Festivals and live entertainment.</p>
<p><strong>MZ </strong>&#8211; startup to develop a platform to use semantic data to enable virtual worlds</p>
<p><strong>Jon Russek</strong> &#8211; film production + law + internet. Interested in AR as artistic medium for creativity</p>
<p><strong>Davide Byron</strong> &#8211; developed the game <a href="http://www.youtube.com/watch?v=k2BK9VAk3RY" target="_blank">Spads and Fokkers</a> and <a href="http://spadsandfokkers.sourceforge.net/" target="_blank">code</a></p>
<p><strong>Philip Ashlock </strong><a href="http://twitter.com/philipashlock" target="_blank">@philipashlock</a>, The Open Planning Project</p>
<p><span><strong>Michael Keating</strong>, The Open Planning Project</span></p>
<p><strong>Yohan Baillot</strong>, <a title="http://twitter.com/yohanBaillot" rel="nofollow" href="http://twitter.com/yohanBaillot">@yohanBaillot</a> future of commercial mobile AR, emerging AR standards</p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/12/06/augmented-reality-devcamp-nyc-the-big-arny-a-collaborative-ar-game-project-modeled-after-swarm-of-angels/feed/</wfw:commentRss>
		<slash:comments>6</slash:comments>
		</item>
		<item>
		<title>The AR Wave Project: An Introduction and FAQ by Thomas Wrobel</title>
		<link>http://www.ugotrade.com/2009/12/04/ar-wave-project-an-introduction-and-faq-by-thomas-wrobel/</link>
		<comments>http://www.ugotrade.com/2009/12/04/ar-wave-project-an-introduction-and-faq-by-thomas-wrobel/#comments</comments>
		<pubDate>Sat, 05 Dec 2009 02:50:18 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[open source]]></category>
		<category><![CDATA[Participatory Culture]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[AR Blps]]></category>
		<category><![CDATA[AR DevCamp]]></category>
		<category><![CDATA[AR Network]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[AR Wave project]]></category>
		<category><![CDATA[AR Wave Wiki]]></category>
		<category><![CDATA[ARBlip]]></category>
		<category><![CDATA[ARDevCampNYC]]></category>
		<category><![CDATA[ARN]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[augmented reality network]]></category>
		<category><![CDATA[distributed augmented reality]]></category>
		<category><![CDATA[Google Wave Federation Protocol]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[Joe Lamantia]]></category>
		<category><![CDATA[layers and channels of augmented reality]]></category>
		<category><![CDATA[markerless augmented reality]]></category>
		<category><![CDATA[multiuser multisource augmented reality]]></category>
		<category><![CDATA[open augmented reality network]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[pygowave]]></category>
		<category><![CDATA[PyGoWave Qt-Based Desktop Client]]></category>
		<category><![CDATA[shared augmented realities]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[Sophia Parafina]]></category>
		<category><![CDATA[storing geolocated data on Wave Servers]]></category>
		<category><![CDATA[Thomas Wrobel]]></category>
		<category><![CDATA[Wave enabled augmented reality]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4960</guid>
		<description><![CDATA[Images from Mitsuo Iso&#8217;s Denno Coil (Click to enlarge), the game &#8220;Metroid Prime,&#8221; and Terminator. Thomas Wrobel, Sophia Parafina, Joe Lamantia, Matthieu Pierce, and I will lead a session tomorrow for AR DevCampNYC introducing the AR Wave Project. Thomas, Joe and Matthieu will participate via skype (10am to 11.30am EST), and Sophia Parafina and [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Screen-shot-2009-12-04-at-7.56.58-PM.png"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Screen-shot-2009-12-04-at-6.43.24-PM.png"><img class="alignnone size-medium wp-image-4961" title="Screen shot 2009-12-04 at 6.43.24 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Screen-shot-2009-12-04-at-6.43.24-PM-300x181.png" alt="Screen shot 2009-12-04 at 6.43.24 PM" width="300" height="181" /></a><br />
</strong></p>
<p><em>Images from Mitsuo Iso&#8217;s<a href="http://en.wikipedia.org/wiki/Denn%C5%8D_Coil" target="_blank"> Denno Coil</a> (Click to enlarge), the game &#8220;Metroid Prime,&#8221; and Terminator.</em></p>
<p><a href="http://www.lostagain.nl/" target="_blank">Thomas Wrobel</a>, <a href="http://opengeo.org/about/team/sophia.parafina/" target="_blank">Sophia Parafina</a>, <a href="http://www.joelamantia.com/" target="_blank">Joe Lamantia, </a><a href="http://matthieupierce.com/" target="_blank">Matthieu Pierce</a>, and I will lead a session tomorrow for<a href="http://www.ardevcamp.org/wiki/index.php?title=Main_Page" target="_blank"> </a><a href="http://www.ardevcamp.org/wiki/index.php?title=NYC_ardevcamp" target="_blank">AR DevCampNYC</a> introducing the AR Wave Project. Thomas, Joe and Matthieu will participate via skype (10am to 11.30am EST), and Sophia Parafina and I will both be at <a href="http://www.ardevcamp.org/wiki/index.php?title=NYC_ardevcamp" target="_blank">AR DevCampNYC</a> at the <a title="http://openplans.org/contact/" rel="nofollow" href="http://openplans.org/contact/">The Open Planning Project office (TOPP)</a>. The <a href="http://pygowave.net/" target="_blank">PyGoWave</a> crew will be introducing <a href="http://livestream.com/pygowave" target="_blank">PyGoWave via LiveStream</a>.</p>
<p>At 1.30pm EST to 2.30pm EST there will be a shared <a href="http://pygowave.net/" target="_blank">PyGoWave</a>/AR Wave session <a href="http://www.ardevcamp.org/wiki/index.php?title=Main_Page" target="_blank">with Mountain View </a>(if bandwidth permits).</p>
<p>The skype conference will be at ardevcampnyc. To participate in Wave, please join the public Wave, <a href="https://wave.google.com/wave/#restored:wave:googlewave.com!w%252BH83lcj6RA" target="_blank">AR Wave: AR DevCamp Session</a>. There is also an <a href="http://arwave.wiki.zoho.com/HomePage.html" target="_blank">AR Wave Wiki up now &#8211; see here</a>.</p>
<p><a href="tridarras.com/#http://www.dimitridarras.com/images/dd_work.jpg" target="_blank">Dimitri Darras </a>(avatar Dimitri Illios) is working on streaming the AR DevCampNYC sessions into Second Life, <a href="http://slurl.com/secondlife/Ambleside/228/247/25" target="_blank">SLURL here</a>.</p>
<p>Thomas has done a very nice introduction and FAQ below. This should help people new to this project to get up to speed quickly.</p>
<p>There are already several Waves that show the history of this project including: <a href="https://wave.google.com/wave/#restored:wave:googlewave.com%21w%252Bhvk2Fj3wB" target="_blank">AR Wave: Augmented Reality Framework Development</a>, <a href="https://wave.google.com/wave/#restored:wave:googlewave.com!w%252BeyLQLb4ED" target="_blank">AR Wave Use Cases</a>, <a href="https://wave.google.com/wave/#restored:wave:googlewave.com!w%252Bok4URyFyR" target="_blank">PyGoWave AR Tech Discussion</a>, <a href="https://wave.google.com/wave/#restored:wave:googlewave.com!w%252BJAcNzz16A" target="_blank">AR Wave Augmented Reality Wave Development</a>, <a href="https://wave.google.com/wave/#restored:wave:googlewave.com!w%252B0VnNxxoOB.1" target="_blank">AR Wave / Muku Organization and Admin</a>.</p>
<p>Also I have several posts for people interested in more of the background, including: <a title="Permanent Link to The Next Wave of AR: Mobile Social Interaction Right Here, Right Now!" rel="bookmark" href="../../2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/">The Next Wave of AR: Mobile Social Interaction Right Here, Right Now!</a>, <a href="http://www.ugotrade.com/2009/08/19/everything-everywhere-thomas-wrobels-proposal-for-an-open-augmented-reality-network/" target="_blank">AR Wave: Layers and Channels of Social Augmented Experiences</a>, <a title="Permanent Link to Total Immersion and the &#8220;Transfigured City:&#8221; Shared Augmented Realities, the &#8220;Web Squared Era,&#8221; and Google Wave" rel="bookmark" href="../../2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/">Total Immersion and the &#8220;Transfigured City:&#8221; Shared Augmented Realities, the &#8220;Web Squared Era,&#8221; and Google Wave.</a></p>
<p>Thomas uses the term Arn (augmented reality network) which is one of the candidate names for the project, Muku (crest of a Wave) is another suggestion. Thomas&#8217; intro and FAQ below can also be found <a href="http://lostagain.nl/testSite/projects/Arn/information.html" target="_blank">here</a>.</p>
<p><strong><br />
</strong></p>
<h3><strong>What is the AR Wave Project?</strong></h3>
<p><strong> </strong></p>
<p>In simple terms it&#8217;s a protocol for storing <a id="zblc" title="geolocated" href="http://en.wikipedia.org/wiki/Geolocation">geolocated</a> data on Wave servers that&#8217;s currently being developed.</p>
<p>We believe this will help lay the foundations for an open, universally accessible, and decentralised system for shared augmented reality overlays which various clients can connect to and use.</p>
<p>This AR Network should spark a lot more rapid adoption of AR technologies, give existing browsers more functionality, and provide the network infrastructure, allowing many of the fictional depictions of AR to become a reality one day.</p>
<p><strong>The AR Network.</strong></p>
<p>When we speak of a future AR Network, we mean one as universal and as standard as the internet. One where people can connect from any number of devices, and without additional downloads, experience the majority of the content.</p>
<p>Where people can just point their phone, webcam, or pair of AR glasses anywhere where a virtual object should be, and they will see it. The user experience is seamless, AR comes to them without them needing to &#8220;prepare&#8221; their device for it.</p>
<p>The Arn should be an inclusive and open platform that any number of devices can connect to, and anyone can make and host their own location-specific models or data.</p>
<p>It should allow people to communicate both publicly and privately, and not have their vision constantly cluttered with things they donâ€™t want to see.</p>
<p>This is our vision, and we think a Wave protocol will help it become a reality.</p>
<p><strong>Why Wave?</strong></p>
<p>Wave allows the advantages of both real-time communication, as well as the advantages of persistent hosting of data. It is both like IRC, and like a Wiki. It allows anyone to create a Wave, and share it with anyone else. It allows Waves to be edited at the same time by many people, or used as a private reference for just one person.</p>
<p>These are all incredibly useful properties for any AR-experience, more so Wave is open. Anyone can make a server or client for Wave. Better yet, these servers will exchange data with each other, providing a seamless world for the user: a single login will let you browse the whole world of public waves, regardless of whoâ€™s providing or hosting the data. Wave is also quite scalable and secure: data is only exchanged when necessary, and will stay local to just one server if no one else needs to view it.</p>
<p>Wave allows bots to run on it, thus allowing blips in a wave to be automatically updated, created or destroyed based on any criteria the coders choose. Wave even allows the playback of all edits since the wave was created.</p>
<p>For all these reasons and a few more, Wave makes a great platform for AR.</p>
<p><strong>How?</strong></p>
<p>In basic terms, we will devise a standard way to geolocate a bit of data and store it as a <a id="u0cd" title="Blip" href="http://google.about.com/od/b/g/google_wave_blip.htm">Blip</a> within a wave.</p>
<p>This data could be a 3d mesh, a bit of text, or even a piece of audio.</p>
<p>Then various clients on various devices could logon, locate, interpret and display this data as they see fit.</p>
<p><a href="http://lostagain.nl/tempspace/PrototypeDiagram3_wave.html" target="_blank"><img class="alignnone size-medium wp-image-4962" title="Screen shot 2009-12-04 at 7.56.58 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/12/Screen-shot-2009-12-04-at-7.56.58-PM-300x168.png" alt="Screen shot 2009-12-04 at 7.56.58 PM" width="300" height="168" /></a></p>
<p><em>Click on image above to enlarge.</em></p>
<p>A typical example of this might be holding up your phone and seeing messages written by your friends and family in the locations which they are relevant.</p>
<p>You could see an arrow hovering over the caf&#233; you&#8217;re meeting a friend at, notes above their flat saying if they are in or out, or messages by shops telling you to pick up the particular brand of cereal they like.</p>
<p>This data would be personal to just yourself and whoever you invite to share that wave with.</p>
<p>Other forms of data could be public, like city-maps, online games, or historical landmarks being recreated. Custom views of the world with data for entertainment, commercial, environmental or informative purposes.</p>
<p>The possibilities with geolocated data are endless, as are the various ways to display and make use of them.</p>
<p>One of the things I&#8217;m most passionate about is people being able to see many different types of data, both public and private at the same time and from many different sources at once.</p>
<p>For instance, if you&#8217;re playing an AR game, why shouldn&#8217;t your chat window be viewable at the same time?</p>
<p>If you have skinned your environment with a custom view of the world, why shouldn&#8217;t you also see mapping or restaurant recommendations?</p>
<p>The ways to present these layers of data and toggle them on/off in the most intuitive and flexible ways would be a task for the client markers, and I&#8217;m sure we will see many innovations in those areas.</p>
<p>But by using Wave it at least provides the framework for having multiple information sources controlled by many different people yet accessible, and user-submittable, via the same protocol.</p>
<p><strong>Who?</strong></p>
<p>This idea first sprouted from a paper I wrote focusing on the potential for IRC to be used for AR;</p>
<p><a id="ig44" title="http://www.lostagain.nl/testSite/projects/Arn/AR_paper.pdf" href="http://www.lostagain.nl/testSite/projects/Arn/AR_paper.pdf">http://www.lostagain.nl/testSite/projects/Arn/AR_paper.pdf</a></p>
<p>I suggested near the end Wave might be a better alternative (using Google Wave was an idea Tish Shute, Ugotrade, brought up in response to the Arn prototype design on IRC), and it quickly became apparent that Wave was a very suitable medium.</p>
<p>Since then, there was a lot of interest, and numerous people have offered to help.</p>
<p>In particular, recently, the <a id="vms1" title="PygoWave" href="http://pygowave.net/blog/">PygoWave</a> team is helping us out, as they have an existing server supporting c/s protocol, which is currently being actively developed.</p>
<p><strong>Where?</strong></p>
<p>You can join the general discussion here;<br />
<a id="wvja" title="Augmented Reality Wave Development" href="https://wave.google.com/wave/#restored:wave:googlewave.com%21w%252BJAcNzz16A">Augmented Reality Wave Development</a></p>
<p>Technical side here;<br />
<a id="qw95" title="Augmented Reality Wave Framework Development" href="https://wave.google.com/wave/#restored:wave:googlewave.com%21w%252Bhvk2Fj3wB">Augmented Reality Wave Framework Development</a></p>
<p><strong>When?</strong></p>
<p>There&#8217;s lots still to do, and we are at an early stage.</p>
<p>Our current targets: (last updated 11/12/2009)</p>
<ul>
<li>Getting reading/writing of prototype ARBlips to the PygoWave server. (the PygoWave team have already made a standalone client and have the protocol for this sorted!)</li>
<li>Establishing a minimal spec for ARBlips to be later expanded.</li>
<li>Writing a very simple prototype online client showing how to store/retrieve the data.</li>
<li>Expanding client to work for some use-cases.</li>
<li>Establish a logo/branding for the project.</li>
</ul>
<p><strong>Other FAQs.</strong></p>
<p><strong>Where&#8217;s the catch?</strong></p>
<p>While we believe Wave is highly suitable for development, it has the drawbacks of being a new system with just a few servers worldwide, which (at the time of writing this) have not yet been federated together.</p>
<p>Naturally, as a new technology, it&#8217;s likely to have some growing pains. And building a new technology on other new technology will multiply that somewhat. The first pain is the lack of a standard client / server protocol. PygoWave have stepped in to the rescue a bit here, by being not just one of the most developed Wave servers other than Google, but also leaping ahead with support for Json based c/s interaction. Google has stated they want the community to take the lead on a c/s protocol, so we are hoping they will adopt a Json variant, or an XMPP one, and add it to the spec. We hope in much the same way as POP3/IMAP have been a standard for email server interaction, a similar one will develop for Wave.</p>
<p>In the meantime we plan to keep the code for writing ARBlips somewhat abstracted so as to make it easy to adapt in future.</p>
<p>As for the newness of Wave and other potential problems it will bring, we aren&#8217;t that worried as its built on <a id="jnw1" title="XMPP" href="http://en.wikipedia.org/wiki/XMPP">XMPP</a>, which has proved reliable already.</p>
<p>The other catch is we are unfunded, which slows development down considerably as we have to fit it around our other jobs.</p>
<p><strong>I&#8217;m making my own AR Browser, and am slightly interested in maybe supporting you.</strong></p>
<p>We are naturally very keen for support, and particularly for those with skills and visions to give feedback on the proposed protocol. Specifically: what do you want stored in a blip?</p>
<p>That&#8217;s what&#8217;s important at this stage.</p>
<p>We don&#8217;t see the Arn as a replacement for existing browser systems at the moment. We don&#8217;t want to restrict innovation or development in this fast developing market as we are very impressed at what&#8217;s been achieved so far. In many ways our task is small in comparison to what&#8217;s already accomplished.</p>
<p>However, we do believe the Arn will make a good addition to existing browser systems. It will allow users to contribute data and have social features without having to worry about accounts or hosting.</p>
<p>It will still be quite some work to support; new GUIs will need to be developed to make it easy to submit data from the devices, as well as to login to waves.</p>
<p>However, we hope over time to build a set of example libs to make the read/writing of ARBlips as easy as possible to implement in your software.</p>
<p>Perhaps a good way to think about it is existing AR Browsers are like word-processors, supporting the Arn will be like adding support for *.txt, but doesn&#8217;t limit what you can do with your own format.</p>
<p><em>Eventually</em> we do hope ARBlips hosted on Wave will become the majority of AR data, and its functionality will be analogous to what the internet is today. We truly believe in the long run a standard is essential.</p>
<p>But for now we think merely getting a baseline format established for how AR data can be communicated will increase user-ability, usefulness, and help the market grow.</p>
<p><strong>Can I help?</strong></p>
<p>Sure.</p>
<p>We particularly need people with technical skills in relevant fields. (both gwt/javascript web programming and c++(/qt)standalone programming help very welcome!).</p>
<p>But we also welcome people just with vision to help focus use-cases and to conceptualise what we want to be able to do with the system.</p>
<p>Please either join the relevant AR Waves or <a href="http://arwave.wiki.zoho.com/HomePage.html">Wiki</a></p>
<p>We are especially interested in those with JSON and Comet experience. Specifically those with the abilities to make standalone applications to read/write to a server using these methods.</p>
<p><strong>What type of data will an AR Blip store?</strong></p>
<p>This is still actively being decided, but essentially it&#8217;s a physical hyperlink.</p>
<p>A connection between a physical location (or object, see below) and a piece of data.</p>
<p>Specifically, we are thinking about the following fields;</p>
<p>Location in X,Y,Z,<br />
Coordinate System used for the above,<br />
Orientation,<br />
MIMEType <span style="color: #666666;">[the type of data stored]</span><br />
DataItself <span style="color: #666666;">[either a http link for 3d meshs and other larger data, or an inline text string if its just a comment]</span><br />
DataUpdateTimestamp <span style="color: #666666;">[so clients know if it&#8217;s necessary to redownload]</span><br />
Editors <span style="background-color: #ffffff;"><span style="background-color: #666666;"><span style="background-color: #ffffff;"><span style="background-color: #666666;"><span style="color: #666666;"><span style="background-color: #ffffff;">[the user/s that edited/created this blip]</span></span></span></span></span></span><br />
ReferanceLink <span style="color: #666666;">[data needed to tie the object at a non-fixed location, such as an image to align it to an object in realtime],</span><br />
Metatags <span style="color: #666666;">[to describe the data]</span></p>
<p><strong>Are you purely tying stuff to fixed geolocations?</strong></p>
<p>Certainly not <img src="http://www.ugotrade.com/wordpress/wp-includes/images/smilies/icon_smile.gif" alt=":)" class="wp-smiley" /><br />
As part of the spec we wish for people to be able to link data to dynamically moving objects, trackable by image or other methods.</p>
<p>The idea being that one day someone could link a piece of text or 3d mesh to an image on a t-shirt they are wearing, or perhaps link a dynamically updating twitter feed, or perhaps provide information on a product (based on its logo).</p>
<p>There&#8217;s a large number of possibilities for image-based linking alone, and that&#8217;s not even considering possibilities like linking RFIDs, or other forms of less precise but invisible binding data.</p>
<p>We need a lot of feedback from those companies already doing markerless tracking. What types of images do you need, ideally, to link a mesh to an object? Is one enough?</p>
<h3><strong>Summary of AR Wave Work to Date</strong></h3>
<p><strong>Purpose:</strong> To provide an open, distributed, and universally accessible platform for augmented reality. To allow the creation of augmented reality content to be as simple as making an html page, or contributing to a wiki.</p>
<p><strong>Specific Goal:</strong> To establish a method for geolocating digital data in physical space (or linking it to physical objects) using wave as a platform.</p>
<p>(For justification as to why we are using Wave see: <a href="http://lostagain.nl/testSite/projects/Arn/information.html" target="_blank">our faq</a> )</p>
<p><strong>Wave as a platform</strong></p>
<p>We are developing on the <a title="PyGoWave" href="http://code.google.com/p/pygowave-server/" target="_blank">PyGoWave</a> server at the moment but the goal is to be compatible with all Wave servers</p>
<p>PyGoWave has already achieved an important aspect in enabling the project in being a waveserver with a working and well documented server protocol. This allows both standalone and web-based clients to interface with it already. See &#8211; <a href="http://github.com/p2k/pygowave-qt">The PyGoWave Qt-Based Desktop Client</a></p>
<p>This is one of the reasons why we have chosen to develop for the Pygo server at this stage.</p>
<p>However, the overall goal of AR Wave is to have a framework compatible with all servers using the Wave Federation Protocol. As more wave servers get c/s protocols then ARblips (the data needed to geolocate objects) could be posted and retrieved from various servers using the same client software. For this a standard should emerge. Just as websites don&#8217;t have to be hosted on specific servers, neither should AR data need to be hosted on specific wave servers.</p>
<p>In order to reach our goal, there are a few very achievable steps involved &#8211; see below.</p>
<p><strong>Feedback</strong></p>
<p>We are still actively seeking feedback, so feel free to join the <a href="https://wave.google.com/wave/#restored:wave:googlewave.com%21w%252Bhvk2Fj3wB">Wave discussions, </a>and see the history of how the specifications of the protocol evolved. You can also read the justification for some of the choices already made. Note a new discussion for AR DevCamp will be begin at <a href="https://wave.google.com/wave/#restored:wave:googlewave.com%21w%252BH83lcj6RA">AR Wave: AR DevCamp Session</a></p>
<p>This will, of course, only be the first draft of the specification, and it is sure to develop much in future.<br />
The important thing now is to make working prototypes while maintaining flexibility.</p>
<p>So what do we need to do?</p>
<p><strong>Steps :</strong></p>
<p><strong>* Establish the overall method &#8211; Done.</strong></p>
<p>Each Wave will be a layer on reality which an individual or a group can create. Each Blip in this Wave refers to either a small piece of inline data (like text) or a remote piece of larger data (like a 3D mesh) as well as the data needed to pin-point it in either relative or absolute real space.<br />
We call these blips: ARblips. They are simply blips that store the data necessary to augment a single object onto a specific bit of reality.</p>
<p>It is up to the clients how they interpret and display the data. They could interpret it as a simple 2d list of nearby objects, or as an advanced 3D overlay, whereby multiple waves from different sources could be viewed at once. What&#8217;s important is that there is a standard way to link the digital data to the real world space.</p>
<p>* Establishing the specification for the ARblip &#8211; In progress<br />
We have a good idea of whatâ€™s needed to be stored in an ARblip, and we have hammered out a rough format.<br />
The data might be stored as blip-annotations, but this has yet to be finalised.<br />
A rough outline of the type of data stored can be seen in this c++/qt header for ARblip data can be seen at the end of this document.</p>
<p>* Storing and retrieving these pieces of ARblip data on the PyGo server &#8211; In progress.<br />
The Pygowave team has made some excellent libraries that should make reading and writing data on the PyGoWave server very trivial for those with c++ skills.<br />
This, however, is a real critical step, so more developers with C++ skills are very welcome!</p>
<p>* Making the above client mobile, and using a devices gps device to place the data. &#8211; Not started.<br />
The next step would be to port the code to a mobile phone and use it&#8217;s gps-inputÂ  to post geolocated data and view what others have posted. This would be a fairly simple and not to useful app in itself. However, it would mark the first time anyone could post AR data and anyone could view it, all using open-source infrastructure.<br />
As a bonus, because we are using wave infrastructure, the updates to any ARblip should appear in near realtime.</p>
<p>* To continue with the proof of concept, we would like to have simultaneous wave input from a PC<br />
and mobile phone at the same time. &#8211; Not started.<br />
For example, someone could post a pin on Google maps API and have that data posted to a ARBlip in a wave. Someone logged into that wave on their mobile device would then see the data posted appear.<br />
More so we hope that when the Google map pin is dragged about, the mobile phone viewer, with just a few seconds lag, will see its location updated in real time.</p>
<p>We hope to make a modest yet practical app at this stage.</p>
<p>* After all this, we can go onto the interesting things:<br />
3D data, camera-overlays, data fixed to objects and many more. There&#8217;s plenty of existing software using these features (such as Wikitude, Layar) and some that are even open source software (like Gamaray and Flashkit). The open source code can give us a leg-up. However, we prefer to establish the protocol first. So naturally, these fancy features aren&#8217;t a priority for us. Rather we think our energy is better spent establishing the protocols and infrastructure so that other people can build more advanced bits of software more easily.</p>
<p>However, once our primary goals are established, we will look to make an open source augmented reality browser ourselves which will surely include many of these features.</p>
<p>Overall, we hope once we have a simple proof of concept, there will be many groups, both existing and new, wanting to use this Wave system for their own apps, games and data.</p>
<p><strong>Conclusion</strong>:<br />
Really it&#8217;s now all about growing the community. We hope as soon as we show how great Wave can be for augmented reality, that lots of individuals and teams will start making their own clients to read/write geolocated data.<br />
Overall we don&#8217;t think anything we make will be that impressive in itself. That&#8217;s not our goal.<br />
We instead hope that our project will enable AR-content to be made as easily as web content. That games, information and apps will be able to be created without the creators having to worry<br />
about the infrastructure behind it.</p>
<p><strong>Technical information -</strong><strong> </strong></p>
<p><strong><br />
</strong><strong>Current ARBlip header file</strong></p>
<p>(below is a c++/qt header file for an ARBlip object that should illustrate the data being stored)</p>
<hr />class <strong>arblip</strong></p>
<p>{</p>
<p align="left"><strong>public</strong>:</p>
<p align="left">arblip();</p>
<p>~arblip();</p>
<p>arblip(QString,QString,double,double,double,int,int,int,QString);</p>
<p>QString getDataAsString();</p>
<p>QString getEditors();</p>
<p>QString getRefID();</p>
<p>QString getXAsString();</p>
<p>QString getYAsString();</p>
<p>QString getZAsString(); bool isFaceingSprite(); <strong> </strong></p>
<p><strong><br />
private</strong>:</p>
<p>//ID reference. This would be a unique identifier for the blip. Presumably the same as Wave uses itself.</p>
<p>QString ReferanceID;</p>
<p>//Last editor(s)</p>
<p>QString Editors;</p>
<p>int PermissionFlags = 68356;  // default 664 octal = rw-rw-r--</p>
<p>//Location</p>
<p>double Xpos;   // left/right</p>
<p>double Ypos;   // up/down</p>
<p>double Zpos;  // front/back</p>
<p>//Orientation</p>
<p>// names, ranges and directions are taken from aeronautics.</p>
<p>// If no orientation is specified, it&#8217;s assumed to be a facing sprite.</p>
<p>// Roll: rotation around the front to back (z) axis. (Lean left or right.)</p>
<p>// range +/- 180 degrees with + values moving the objects right side down.</p>
<p>int Roll;</p>
<p>// Pitch: rotation around the left to right (x) axis. (tilt up or down)</p>
<p>// Range +/- 90 degrees with + values moving the objects front up. (looking up)</p>
<p>int Pitch;</p>
<p>// Yaw: rotation around the vertical (y) axis. (turn left or right.)</p>
<p>// range +/- 180 degrees with + values moving the objects face to its right.</p>
<p>int Yaw;</p>
<p>bool FacingSprite; //if no rotation specified, this should default to true</p>
<p>//if set to true when a rotation is set, then it keeps that rotation relative to the viewer</p>
<p>//not relative to the earth.</p>
<p>//Data format</p>
<p>QString DataMIME;</p>
<p>QString CordinateSystemUsed; //The co-ordinate system used. This should be a string representing a Open Geospatial Consortium standard. This could be earth-relative for gps co-ordinates, or in some cases relative to the viewer, for data to be displayed in a HUD like style.</p>
<p>//Data itself</p>
<p>QString Data;</p>
<p>QString DataUpdatedTimestamp; //Time the Data was updated changed</p>
<p align="left">//Note; A separate timestamp should be used for updates that don&#8217;t affect the data itself.<br />
//(such as if a 3d object moves, but its mesh isn&#8217;t changed)</p>
<p>//Data metadata<br />
QMap&lt;QString, QString&gt; Metadata;</p>
<p>};</p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/12/04/ar-wave-project-an-introduction-and-faq-by-thomas-wrobel/feed/</wfw:commentRss>
		<slash:comments>3</slash:comments>
		</item>
		<item>
		<title>The Next Wave of AR: Mobile Social Interaction Right Here, Right Now!</title>
		<link>http://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/</link>
		<comments>http://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/#comments</comments>
		<pubDate>Fri, 20 Nov 2009 04:53:07 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial general Intelligence]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[online privacy]]></category>
		<category><![CDATA[open source]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[AR browsers]]></category>
		<category><![CDATA[AR Dev camp]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[calo]]></category>
		<category><![CDATA[mobile social]]></category>
		<category><![CDATA[mobile social interaction utility]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[pygowave]]></category>
		<category><![CDATA[real time internet]]></category>
		<category><![CDATA[siri]]></category>
		<category><![CDATA[smart things]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented reality]]></category>
		<category><![CDATA[The Copenhagen Wheel]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[the outernet]]></category>
		<category><![CDATA[the sentient city]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>
		<category><![CDATA[Web Squared]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4869</guid>
		<description><![CDATA[The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now! View more presentations from Tish Shute. Click on the image below or here to watch this presentation and others from Momo13]]></description>
				<content:encoded><![CDATA[<div id="__ss_2542526" style="width: 425px; text-align: left;"><a style="font:14px Helvetica,Arial,Sans-serif;display:block;margin:12px 0 3px 0;text-decoration:underline;" title="The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now!" href="http://www.slideshare.net/TishShute/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526">The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now!</a><object style="margin:0px" classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="425" height="355" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="allowFullScreen" value="true" /><param name="allowScriptAccess" value="always" /><param name="src" value="http://static.slidesharecdn.com/swf/ssplayer2.swf?doc=thenextwaveofar2-091120000046-phpapp01&amp;stripped_title=the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526" /><param name="allowfullscreen" value="true" /><embed style="margin:0px" type="application/x-shockwave-flash" width="425" height="355" src="http://static.slidesharecdn.com/swf/ssplayer2.swf?doc=thenextwaveofar2-091120000046-phpapp01&amp;stripped_title=the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526" allowscriptaccess="always" allowfullscreen="true"></embed></object></p>
<div style="font-size: 11px; font-family: tahoma,arial; height: 26px; padding-top: 2px;">View more <a style="text-decoration:underline;" href="http://www.slideshare.net/">presentations</a> from <a style="text-decoration:underline;" href="http://www.slideshare.net/TishShute">Tish Shute</a>.</div>
<p>Click on the image below or <a href="http://www.mobilemonday.nl/talks/tish-shute-the-next-wave-of-ar/" target="_blank">here to watch</a> this presentation and others from <a href="http://www.mobilemonday.nl/">Momo13</a></p></div>
<p><a href="http://www.mobilemonday.nl/talks/tish-shute-the-next-wave-of-ar/" target="_blank"><img class="alignnone size-medium wp-image-4876" title="Screen shot 2009-11-20 at 1.32.24 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-20-at-1.32.24-PM-300x167.png" alt="Screen shot 2009-11-20 at 1.32.24 PM" width="300" height="167" /></a></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/feed/</wfw:commentRss>
		<slash:comments>4</slash:comments>
		</item>
	</channel>
</rss>
